+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.HZ6vwnRkal --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
[2 ymakes processing] [8588/8590 modules configured]
[2 ymakes processing] [8632/8632 modules configured]
Configuring dependencies for platform tools
[3 ymakes processing] [9240/9240 modules configured]
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium: Requirement ram is redefined 16 -> 28
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch: Requirement ram is redefined 16 -> 28
[3 ymakes processing] [9240/9240 modules configured] [149/149 modules rendered]
[2 ymakes processing] [9240/9240 modules configured] [5281/5415 modules rendered]
[2 ymakes processing] [9240/9240 modules configured] [5415/5415 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
Configuring tests execution
Configuring local and dist store caches
Configuration done.
Preparing for execution |33.3%| CLEANING SYMRES | 0.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/libydb-core-mind.a | 1.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut | 1.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a | 1.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a | 1.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/libydb-core-util.a | 2.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a | 2.6%| PREPARE $(VCS) | 3.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a | 3.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a | 3.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a | 3.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a | 3.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a | 3.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/libcore-mon-audit.a | 4.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a | 3.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a | 3.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a | 3.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a | 3.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a | 3.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a | 3.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a | 3.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a | 3.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a | 3.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a | 4.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut | 4.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a | 4.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a | 4.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/host/libcore-kqp-host.a | 4.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a | 5.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/persqueue/common/libcore-persqueue-common.a | 5.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a | 5.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a | 5.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a | 5.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/libydb-core-persqueue.a | 5.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a | 5.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a | 6.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/libydb-core-mon.a | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a | 6.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a | 5.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a | 5.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp | 5.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp | 5.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/libydb-core-load_test.a | 5.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp | 5.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a | 5.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a | 5.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp | 5.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp | 5.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a | 5.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp | 5.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker.cpp | 5.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme | 5.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut | 5.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq | 6.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut | 6.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut | 5.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut | 5.8%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a | 5.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch | 6.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut | 6.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut | 6.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut | 6.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init | 6.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp | 6.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut | 6.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut | 6.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a | 6.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut | 6.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a | 6.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration | 6.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a | 6.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview | 6.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl | 6.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors | 6.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests | 6.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp | 6.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows | 6.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb | 6.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a | 6.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a | 7.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp | 7.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut | 7.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a | 7.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp | 7.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a | 7.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/protos/viewer.pb.cc | 7.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a | 7.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp | 7.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp | 7.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a | 7.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a | 7.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a | 7.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a | 7.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp | 7.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a | 7.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a | 7.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a | 7.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a | 8.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id.cpp | 8.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert.cpp | 8.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a | 8.2%| PREPARE $(YMAKE_PYTHON3-212672652) | 8.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a | 8.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp | 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a | 8.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp | 8.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq.cpp | 8.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a | 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a | 8.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp | 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a | 8.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp | 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a | 8.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a | 8.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a | 8.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a | 8.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a | 8.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a | 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a | 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a | 9.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/testlib/libydb-core-testlib.a | 9.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a | 9.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a | 9.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a | 9.6%| PREPARE $(LLD_ROOT-3107549726) - 25.18 MB | 9.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/libcore-ymq-http.a | 9.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp | 9.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a | 9.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a | 9.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a | 9.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tier_info.cpp | 9.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |10.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |10.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |10.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |10.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |10.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |10.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |11.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection.cpp |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |11.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/yql_single_query.cpp |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |12.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |12.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |12.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/sql_parser.cpp |12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |12.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |13.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor/service/worker.cpp |13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |13.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/other/mon_get_blob_page.cpp |13.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/other/mon_blob_range_page.cpp |14.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/header.cpp |14.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_atomicblockcounter.cpp |14.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/xml_builder.cpp |14.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_flightcontrol.cpp |14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |14.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/fifo/schema.cpp |14.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/parser.rl6.cpp |14.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actor_helpers.cpp |14.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.h_serialized.cpp |14.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |14.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk.cpp |14.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.cpp |14.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |14.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/type_codecs_defs.cpp |14.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/checker.cpp |14.7%| PREPARE $(CLANG_FORMAT-2963054096) |14.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/abstract.cpp |15.3%| PREPARE $(BLACK-2187759822) |15.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |16.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/common/tracing_support.cpp |16.2%| PREPARE $(FLAKE8_PY2-2255386470) - 8.40 MB |16.3%| PREPARE $(RUFF-4155214026) - 8.40 MB |16.4%| PREPARE $(FLAKE8_PY3-3596799299) - 18.89 MB |16.5%| PREPARE $(TEST_TOOL_HOST-sbr:9790047688) - 33.57 MB |16.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_util.cpp |16.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_delayed_cost_loop.cpp |16.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_defs.h_serialized.cpp |16.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_drivemodel_db.cpp |16.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |17.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_params.cpp |17.2%| PREPARE $(PYTHON) - 50.36 MB |22.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |22.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |24.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/blob/blob_serialization.cpp |25.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/blob/blob.cpp |25.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |26.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache.cpp |27.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/header.cpp |27.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |27.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/program_constructor.cpp |28.2%| PREPARE $(CLANG-1922233694) - 201.34 MB |28.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/kernels_wrapper.cpp |28.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |30.1%| PREPARE $(CLANG16-1380963495) - 293.63 MB |30.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |31.0%| PREPARE $(CLANG18-1866954364) - 296.78 MB |31.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |31.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/service/service.cpp |31.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/types.cpp |31.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |32.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/xml.cpp |32.5%| PREPARE $(CLANG-2925601218) - 317.92 MB |32.5%| PREPARE $(CLANG20-2303489773) |32.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |32.8%| PREPARE $(WITH_JDK-sbr:9470949154) - 167.80 MB |33.0%| PREPARE $(JDK17-2548586558) |33.0%| PREPARE $(WITH_JDK17-sbr:9470949154) |33.1%| PREPARE $(JDK_DEFAULT-2548586558) |32.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |33.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |33.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |34.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |34.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |34.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |35.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |35.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |35.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |35.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |36.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |36.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |36.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |36.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp 
|37.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |37.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |36.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/internal.cpp |37.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |37.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |38.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |38.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |38.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |38.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher.cpp |38.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |38.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |39.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |39.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |38.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |39.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |39.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_denylist.cpp |39.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |40.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/std/queries.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |41.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |40.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |40.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |41.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |41.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/events.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |42.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |42.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |42.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |43.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |43.9%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |44.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |44.6%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |44.6%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |44.5%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/attributes_md5.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/auth_mocks.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |44.6%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/std/schema.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/drivedata_serializer.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_signal_event.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/infly.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/message_delay_stats.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/log.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/local_rate_limiter_allocator.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/storage_helpers.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |44.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |44.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |44.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/sha256.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |44.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/user_settings_names.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |44.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/physical/predicate_collector.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/actor.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/executor.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/sourceid_info.h_serialized.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/topic_description.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/heartbeat.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager_graph_cmp.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/memory_info.cpp |45.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp 
|45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/crossref.cpp |44.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_factory.cpp |44.7%| PREPARE $(GDB) - 26.23 MB |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/common_app.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_queue.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/change_visibility.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tx_helpers.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/fake_coordinator.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/error.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/count_queues.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tenant_runtime.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_message.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queues.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_settings.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/http.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/common_helper.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/monitoring.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |45.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/cs_helper.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_user.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_users.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/schema.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/common/key.cpp |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_queue.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/node_tracker.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_user.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_permissions.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/test_client.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/receive_message.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/metering.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/column_families.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge_queue.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_schema.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_actor.cpp 
|45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/group_stat_aggregator.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/actor.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_leader.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_profiles.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_service.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/send_message.cpp |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/tag_queue.cpp |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/service.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |45.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |45.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |45.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |45.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/md5/libcpp-digest-md5.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |45.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/retention.cpp |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_helpers.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |45.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/profiler.cpp |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/untag_queue.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_transform.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/monitor.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_runner.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/abstract.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/events.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_host.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/stats.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/defs.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/predicate.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_translate.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/queue.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/filter.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/stats.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp >> test.py::py2_flake8 [GOOD] |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/common.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/base_with_blobs.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_counters.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/service.cpp >> test.py::py2_flake8 [GOOD] |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |46.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp 
|46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp >> test.py::py2_flake8 [GOOD] |46.3%| [TS] ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a >> test.py::py2_flake8 [GOOD] |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/settings.cpp |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/compaction_info.cpp |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/libydb-core-security.a |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a >> test.py::py2_flake8 [GOOD] |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.4%| [TS] ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp >> test_liveness_wardens.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/events.cpp |46.5%| [TS] ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/abstract.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp >> test.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |46.6%| [TS] ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |46.7%| [TS] ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_vector_index.py::flake8 [GOOD] >> test_vector_index_large_levels_and_clusters.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] >> test_transform.py::flake8 [GOOD] |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |46.7%| [TS] ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |46.7%| [TS] ydb/tests/stress/transfer/tests/flake8 >> test_workload.py::flake8 [GOOD] |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> test_parametrized_queries.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |46.7%| [TS] ydb/tests/stress/s3_backups/flake8 >> __main__.py::flake8 [GOOD] |46.7%| [TS] ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |46.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |46.8%| [TS] ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |46.8%| [TS] ydb/tests/datashard/vector_index/large/flake8 >> test_vector_index_large_levels_and_clusters.py::flake8 [GOOD] |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |46.8%| [TS] ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |46.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |46.9%| [TS] ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |46.9%| [TS] ydb/tests/functional/tpcc/flake8 >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test_large_import.py::flake8 [GOOD] >> test_sql_negative.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a >> test_crud.py::flake8 [GOOD] |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |46.9%| [TS] ydb/tests/datashard/parametrized_queries/flake8 >> test_parametrized_queries.py::flake8 [GOOD] |46.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |46.9%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp >> conftest.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_grants.py::flake8 [GOOD] >> test_paths_lookup.py::flake8 [GOOD] >> test_clean.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_default_path.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |47.0%| [TS] ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |47.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |47.0%| [TS] ydb/tests/datashard/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |47.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |47.1%| [TS] ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |47.1%| [TS] ydb/tests/olap/s3_import/large/flake8 >> test_large_import.py::flake8 [GOOD] |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |47.1%| [TS] ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |47.1%| [TS] ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 
[GOOD] >> test.py::flake8 [GOOD] |47.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/auth/libydb-services-auth.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |47.1%| [TS] ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |47.1%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |47.1%| [TS] ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |47.2%| [TS] ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a >> base.py::flake8 [GOOD] >> test_delete_all_after_inserts.py::flake8 [GOOD] >> test_delete_by_explicit_row_id.py::flake8 [GOOD] >> compare.py::flake8 [GOOD] |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |47.1%| [TS] ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/backup/libydb-services-backup.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/libydb-services-cms.a |47.2%| [TS] ydb/tests/functional/security/flake8 >> test_paths_lookup.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |47.3%| [TS] ydb/tests/functional/tpc/medium/flake8 >> test_workload_manager.py::flake8 [GOOD] |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/libydb-services-config.a |47.3%| [TS] ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |47.3%| [TS] ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a 
|47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |47.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/discovery/libydb-services-discovery.a |47.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |47.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |47.5%| [TS] ydb/tests/olap/delete/flake8 >> test_delete_by_explicit_row_id.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |47.5%| [TS] ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/shard_iterator.cpp >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/next_token.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bridge/libydb-services-bridge.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |47.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a >> common.py::flake8 [GOOD] >> test_bridge.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> http_client.py::flake8 [GOOD] >> query_results.py::flake8 [GOOD] |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |47.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |47.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/exception/libcpp-yt-exception.a |47.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |47.6%| [TS] ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |47.6%| [TS] ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/libydb-services-fq.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/libcore-base-generated.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> http_helpers.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test_canonical_records.py::flake8 [GOOD] |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |47.6%| [TS] ydb/tests/functional/bridge/flake8 >> test_discovery.py::flake8 [GOOD] |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |47.8%| [TS] ydb/core/fq/libs/http_api_client/flake8 >> query_results.py::flake8 [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/libcore-control-lib.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/monitoring/libydb-services-monitoring.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |47.6%| [TS] ydb/tests/functional/audit/flake8 >> test_canonical_records.py::flake8 [GOOD] |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp >> test_secondary_index.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |47.7%| [TS] ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/dynamic_control_board_impl.cpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp >> test_select.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/immediate_control_board_html_renderer.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/view/libydb-services-view.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> 
test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/replication/libydb-services-replication.a |47.8%| [TS] ydb/tests/datashard/secondary_index/flake8 >> test_secondary_index.py::flake8 [GOOD] |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |47.9%| [TS] ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |47.9%| [TS] ydb/tests/datashard/select/flake8 >> test_select.py::flake8 [GOOD] |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a >> test_cte.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/tablet/libydb-services-tablet.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ymq/libydb-services-ymq.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |48.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a >> __main__.py::flake8 [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |47.8%| [TS] ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |48.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |48.0%| [TS] ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |48.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tracing/libydb-core-tracing.a |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp 
|48.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/config.cpp |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |48.0%| [TS] ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ymq/utils.cpp |48.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_interval.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/db_counters.cpp |48.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a >> test_workload.py::flake8 [GOOD] >> common.cpp::clang_format [GOOD] >> common.h::clang_format [GOOD] >> __main__.py::flake8 [GOOD] |48.1%| [TS] ydb/tests/functional/kqp/plan2svg/flake8 >> test_cte.py::flake8 [GOOD] |48.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |48.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |48.1%| [TS] ydb/tests/stress/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/http.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/service.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace_collection.cpp >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/counters.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_shared_func.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |48.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a >> base.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> basic_reading.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> data_paging.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> listing_paging.py::flake8 [GOOD] >> settings_validation.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test_batch_operations.py::flake8 [GOOD] >> test_rename_table.py::flake8 [GOOD] |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |48.0%| [TS] ydb/tests/stress/show_create/view/tests/flake8 >> test_workload.py::flake8 [GOOD] |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |48.3%| [TS] ydb/core/tx/columnshard/tools/visualize_portions/flake8 >> __main__.py::flake8 [GOOD] |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a >> test_compatibility.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_data_type.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stability.py::flake8 [GOOD] >> test_kafka_topic.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> test_config_migration.py::flake8 [GOOD] >> test_node_broker_delta_protocol.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_rolling.py::flake8 [GOOD] >> test_simple_reader.py::flake8 [GOOD] |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |48.3%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |48.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |48.4%| [TS] ydb/core/kqp/ut/federated_query/common/clang_format >> common.h::clang_format [GOOD] |48.4%| [TS] ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a >> test_configuration_version.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_distconf.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_distconf_reassign_state_storage.py::flake8 [GOOD] >> test_distconf_self_heal.py::flake8 [GOOD] >> test_distconf_sentinel_node_status.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> test_table_schema_compatibility.py::flake8 [GOOD] >> test_topic.py::flake8 [GOOD] >> test_transfer.py::flake8 [GOOD] >> test_vector_index.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> udf/test_datetime2.py::flake8 [GOOD] >> udf/test_digest.py::flake8 [GOOD] >> 
udf/test_digest_regression.py::flake8 [GOOD] |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser.cpp |47.0%| [TS] ydb/tests/solomon/reading/flake8 >> settings_validation.py::flake8 [GOOD] |47.1%| [TS] ydb/tests/compatibility/olap/flake8 >> test_rename_table.py::flake8 [GOOD] |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/grpc_service.cpp |47.4%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a >> test.py::py2_flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test_serializable.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |47.1%| [TS] ydb/tests/compatibility/flake8 >> udf/test_digest_regression.py::flake8 [GOOD] |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp >> test_workload.py::flake8 [GOOD] |47.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |47.9%| [TS] ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |47.4%| [TS] ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |47.5%| [TS] ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |47.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/auth/grpc_service.cpp >> test.py::flake8 [GOOD] |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |48.3%| [TS] ydb/tests/stress/s3_backups/tests/flake8 >> test_workload.py::flake8 [GOOD] |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |48.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |48.5%| [TS] ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/services/fq/private_grpc.cpp |48.7%| [TS] ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] >> integrations_test.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] |48.6%| [TS] ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |48.6%| [TS] ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/backup/grpc_service.cpp |48.7%| [TS] ydb/tests/stress/node_broker/tests/flake8 >> test_workload.py::flake8 [GOOD] |48.7%| [TS] ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |48.7%| [TS] ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |48.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/deleting.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |48.8%| [TS] ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp >> main.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] >> test_types_and_formats.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |48.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_page.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/bridge/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ydb_over_fq.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/auth/auth_helpers.cpp >> test_workload.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |48.6%| [TS] ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_data.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/timeout.cpp |48.7%| [TS] ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_index.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/services/fq/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/discovery/grpc_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/grpc_service.cpp >> __main__.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/ss_dialog.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |48.7%| [TS] ydb/apps/dstool/flake8 >> main.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/config.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/executor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/put_records_actor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_record.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |48.7%| [TS] ydb/tests/olap/s3_import/flake8 >> test_types_and_formats.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/activation.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/view/grpc_service.cpp |48.7%| [TS] ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.7%| [TS] ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.8%| [TS] ydb/tests/stress/cdc/tests/flake8 >> test_workload.py::flake8 [GOOD] |48.8%| [TS] ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/monitoring/grpc_service.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/grpc_service.cpp |48.8%| [TS] ydb/library/yaml_config/tools/simple_json_diff/flake8 >> __main__.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/registration.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/tablet/ydb_tablet.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/counters/counters.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/services/metadata/ds_table/service.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/grpc_service.cpp >> test_workload.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/ext_counters.cpp >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/sysview_service.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/wait_events.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tracing/tablet_info.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/maintenance/grpc_service.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp >> __main__.py::flake8 [GOOD] |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/block_events.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |48.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_download.cpp |48.8%| [TS] ydb/tests/stress/ctas/tests/flake8 >> test_workload.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/ymq_proxy.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> 
test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/replication/grpc_service.cpp |48.8%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |48.8%| [TS] ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |48.8%| [TS] ydb/tests/stability/tool/flake8 >> __main__.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp >> test.py::py2_flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp >> test_workload_topic.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/extstorage_usage_config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |48.8%| [TS] ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/backup_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp >> reconfig_state_storage_workload_test.py::flake8 [GOOD] >> test_board_workload.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] >> 
test_state_storage_workload.py::flake8 [GOOD] >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |48.8%| [TS] ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp >> test.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |48.9%| [TS] ydb/tests/stress/topic_kafka/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] >> test_async_replication.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp 
|48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |48.9%| [TS] ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 >> test_state_storage_workload.py::flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |48.9%| [TS] ydb/tests/functional/benchmarks_init/flake8 >> test_init.py::flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |49.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_mysql.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__init.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_common.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |48.9%| [TS] ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |49.0%| [TS] ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |49.0%| [TS] ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |48.9%| [TS] ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__write.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |48.9%| [TS] ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_scan.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |49.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execution_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/follower_edge.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |49.0%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/minikql/comp_nodes/llvm16/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |49.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |49.1%| [TS] ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |48.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |49.1%| [TS] ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/operation.cpp |49.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/cursor.cpp |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp >> test_streaming.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/key_validator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |49.2%| [TS] ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/upload_stats.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |49.2%| [TS] ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |49.2%| [TS] ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/fq/streaming/flake8 >> test_streaming.py::flake8 [GOOD] |48.9%| [TS] ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/libydb-core-tablet.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a >> test.py::py2_flake8 [GOOD] |49.3%| [TS] ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/aggregated_counters.cpp |49.4%| [TS] ydb/tests/stress/scheme_board/pile_promotion/flake8 >> __main__.py::flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |49.4%| [TS] ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_tracing_signals.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_counters_merger.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_app.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_db_counters.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_client_cache.cpp |49.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp >> test_vector_index.py::flake8 [GOOD] >> test_vector_index_negative.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |49.4%| [TS] ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_server.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/import_s3.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp >> test_quota_exhaustion.py::flake8 [GOOD] >> test_export_import_s3.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |49.4%| [TS] ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |49.4%| [TS] ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |49.5%| [TS] 
ydb/tests/datashard/vector_index/medium/flake8 >> test_vector_index_negative.py::flake8 [GOOD] |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/counters.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |49.4%| [TS] ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |49.5%| [TS] ydb/tests/compatibility/s3_backups/flake8 >> test_export_import_s3.py::flake8 [GOOD] |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |49.6%| [TS] ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |49.6%| [TS] ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a >> test_workload_topic.py::flake8 [GOOD] |49.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |49.3%| [TS] ydb/tests/stress/mixedpy/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a >> test_partitioning.py::flake8 [GOOD] >> test_sql.py::flake8 
[GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_mysql.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> test_break.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |49.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_locks.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/restore_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |49.7%| [TS] ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> test_copy_table.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |49.6%| [TS] ydb/tests/datashard/partitioning/flake8 >> test_partitioning.py::flake8 [GOOD] |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |49.7%| [TS] ydb/tests/stress/topic/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |49.8%| [TS] ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/counters.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |49.7%| [TS] ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |49.8%| [TS] ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |49.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |49.8%| [TS] ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> test_decimal.py::flake8 [GOOD] >> test_duplicates.py::flake8 [GOOD] >> test_parallel.py::flake8 [GOOD] >> test_s1.py::flake8 [GOOD] >> test_s_float.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> test_yandex_audit.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |49.8%| [TS] ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |49.8%| [TS] ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |49.9%| [TS] ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a >> __main__.py::flake8 [GOOD] |49.8%| [TS] ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |49.9%| [TS] ydb/tests/functional/tpc/medium/tpch/flake8 >> test_s_float.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |50.0%| [TS] ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] >> test_dump_restore.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> pile_promotion_test.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> 
test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] |49.9%| [TS] ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/settings.cpp |50.0%| [TS] ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunk_meta.cpp |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |50.0%| [TS] ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |50.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |50.1%| [TS] ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |50.0%| [TS] ydb/tests/library/compatibility/configs/comparator/flake8 >> __main__.py::flake8 [GOOD] |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |50.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |50.1%| [TS] ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 >> test_scheme_board_workload.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> tpc_tests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_add_column.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> remote_execution.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |49.8%| [TS] ydb/tests/library/compatibility/binaries/downloader/flake8 >> __main__.py::flake8 [GOOD] |49.9%| [TS] ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |49.9%| [TS] ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |50.2%| [TS] ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp >> test_workload.py::flake8 [GOOD] |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/counters.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/common.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_whiteboard.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/interaction.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/abstract.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender.cpp |50.2%| [TS] ydb/tests/datashard/add_column/flake8 >> test_add_column.py::flake8 [GOOD] |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/container.cpp |50.3%| [TS] ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/resource_subscriber/counters.cpp >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |50.3%| [TS] ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |50.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |50.3%| [TS] ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |50.3%| [TS] ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |50.4%| [TS] ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a >> test.py::py2_flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test_split_merge.py::flake8 [GOOD] |50.3%| [TS] ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/events/events.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |50.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a >> data_read_correctness.py::flake8 [GOOD] >> order_by_with_limit.py::flake8 [GOOD] >> tablets_movement.py::flake8 [GOOD] >> test_cs_many_updates.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> test_overloads.py::flake8 [GOOD] >> upgrade_to_internal_path_id.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_finished.cpp |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/write.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |50.6%| [TS] ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] |50.5%| [TS] ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |50.5%| [TS] ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |50.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/allocation.h_serialized.cpp |50.6%| [TS] ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/limiter/grouped_memory/service/ids.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/process.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/group.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/counters.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/allocation.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_list_renderer.cpp >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/events.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/abstract.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/stage_features.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |50.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |50.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |49.5%| [TS] ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_reset.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |49.4%| [TS] ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |50.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_sys.cpp >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp >> test_dml.py::flake8 [GOOD] |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_metrics.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |50.7%| [TS] ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/storage/tier/storage.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |50.8%| [TS] 
ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp >> test.py::py2_flake8 [GOOD] |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_delete.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/common_level.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |50.7%| [TS] ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/events.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/general_cache/service/counters.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/service.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/events.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/manager.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/write_data.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shard_writer.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/service.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/config.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/logging.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/controller.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp >> test_workload.py::flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |50.8%| [TS] ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |50.9%| [TS] ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp >> __main__.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> 
test_session_pool.py::flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/worker.h_serialized.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |51.0%| [TS] ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] 
|51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp >> test.py::py2_flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |50.9%| [TS] ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/service/counters.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |50.9%| [TS] ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/tenant_resolver.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/topic_message.cpp >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/abstract.cpp |51.0%| [TS] ydb/tests/stress/olap_workload/flake8 >> __main__.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/events.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 
[GOOD] >> test.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_consumer_remover.cpp |51.0%| [TS] ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_with_stream.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_transfer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/resource_id_resolver.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/replication.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/event_util.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/lag_provider.cpp >> test_quoting.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/private_events.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_base.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/session_info.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/replication.h_serialized.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/sys_params.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_remover.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |51.1%| [TS] ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |51.1%| [TS] ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash.cpp |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |51.2%| [TS] ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |51.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/console.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/source_location.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/format.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/gen_step.cpp |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/two_part_description.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/subscriber.h_serialized.cpp >> test_clickbench.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_cdc.py::flake8 [GOOD] >> test_workload_ctas.py::flake8 [GOOD] >> test_workload_kafka.py::flake8 [GOOD] >> test_workload_kv.py::flake8 [GOOD] >> test_workload_log.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test_workload_mixed.py::flake8 [GOOD] >> test_workload_node_broker.py::flake8 [GOOD] >> test_workload_olap.py::flake8 [GOOD] >> test_workload_oltp.py::flake8 [GOOD] >> test_workload_reconfig_state_storage.py::flake8 [GOOD] >> test_workload_show_create.py::flake8 [GOOD] >> test_workload_simple_queue.py::flake8 [GOOD] >> test_workload_statistics.py::flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] >> test_workload_topic_kafka.py::flake8 [GOOD] >> test_workload_transfer.py::flake8 [GOOD] |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/scheme_cache.h_serialized.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/common.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/task.cpp |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a 
|51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/status_channel.cpp >> conftest.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |51.1%| [TS] ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/random.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |51.1%| [TS] ydb/tests/olap/load/flake8 >> test_workload_transfer.py::flake8 [GOOD] |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/service.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |51.3%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/manager.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/json_change_record.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |51.3%| [TS] ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_kafka_streams.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |51.2%| [TS] ydb/tests/functional/statistics/flake8 >> test_restarts.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp >> test_schemeshard_limits.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |51.3%| [TS] ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |51.4%| [TS] ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |51.4%| [TS] ydb/tests/stress/kafka/tests/flake8 >> test_kafka_streams.py::flake8 [GOOD] |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/update.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/schema.cpp |51.4%| [TS] ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |51.5%| [TS] ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] |51.2%| [TS] ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/hash.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/unboxed_reader.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp >> test_workload.py::flake8 [GOOD] |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/adapter.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/common.cpp |51.5%| [TS] ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/client.cpp |51.6%| [TS] ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |51.6%| [TS] ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |51.6%| [TS] 
ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/config.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a >> gen-report.py::flake8 [GOOD] |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |51.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |51.7%| [TS] ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |51.7%| [TS] ydb/tests/stress/oltp_workload/flake8 >> __main__.py::flake8 [GOOD] |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |51.8%| [TS] ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp >> tstool.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |51.8%| [TS] ydb/tools/tstool/flake8 >> tstool.py::flake8 [GOOD] |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/signals/libydb-library-signals.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |51.9%| [TS] ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/agent.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/histogram.cpp |52.0%| [TS] ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/states.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/client.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/private.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/object_counter.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/service.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/table_writer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |52.2%| [TS] ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a 
|52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/helpers.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/backup.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |52.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/service.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/failure_injection.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/load_test.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/validation_functions.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_factory.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/libcore-config-validation.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a 
|52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/events.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/parsing.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/stream.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/utils.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/random.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/abstract.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_common.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_internal.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/visitor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/reserve.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/stream_logic.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/fetcher.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/execution.h_serialized.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/original.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/header.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_optimization.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/abstract.h_serialized.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/collection.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/aggr_common.h_serialized.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/filter.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_const.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/projection.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_keys.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_execute.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/index.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/functions.cpp |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/run_actor_params.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/pinger.cpp |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_batch_builder.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/parser_abstract.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers_minikql.cpp |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/custom_registry.cpp |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/raw_parser.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/size_calcer.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/parser_base.cpp |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/special_keys.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/process_columns.cpp |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/permutations.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/converter.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_filter.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/actors_factory.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/probes.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/metadata/libydb-services-metadata.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/events.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_stored_state_data.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/state.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collect_operation.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_data.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/events.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_helpers.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_simple_db_flat.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/simple/kqp_event_ids.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/simple/reattach.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/simple/services.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_row_builder.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_slider.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/simple/temp_tables.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/util.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/manager.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/counters.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/common/events.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/test_object_storage.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/signals/owner.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/test_monitoring.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/probes.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/kesus/libydb-services-kesus.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/test_data_streams.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/decoder.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.cc |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/parsing.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/sharding.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/kqp_common.h_serialized.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/events.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/simple/helpers.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/request_features.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/restore_controller.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/ydb_value_operator.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/common/helpers.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/common/cpu_quota_manager.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/preparation_controller.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/fetch_database.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/simple/query_id.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/modification_controller.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/simple/settings.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/native.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/table_record.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/program/execution.cpp |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/bsc_audit.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/util.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/config/validation/auth_config_validator.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/event.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/common/events.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/control.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/json_parser.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/service.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/service.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/fetcher.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/behaviour.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/manager.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/net_classifier.h_serialized.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/counters.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/response_tasks.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_actors.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/1a15604c25b1947326f913de39_raw.auxcpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/85072bb936b0763f4b03040c4c_raw.auxcpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d0c68229b90bd556e2f005552a_raw.auxcpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/9f9206263dfb9ffdca04db0481_raw.auxcpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/2fdfeb43baacde1fbb5220afe4_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/helpers.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/codecs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/common.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/e483ada69ee7d9a6998f159f82_raw.auxcpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/snapshot.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d185e5f961bbc6949188a55441_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/schema.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/2eb899e83c0c5b0382f3c259a6_raw.auxcpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/18b717b256d1557d67c6530796_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ee07bf8091b24859c5c98ec8c2_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/f6a913595646bb7217eb864a14_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fe6a3d8ff1e9ff5f5286b8217e_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ff907458b37cb5e4e58155b540_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fab7d045ee17363b5a314402ec_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/probes.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/18547f9635bde59b5840161212_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/20d1b0d93728f44e2e57bbcbbe_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/e72d7c890a4e6d107b4b82a390_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/1a15d101197900a3922db1966c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/5c2d132b07ac7894944187faad_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/441719f19541a7bd10286d509f_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/48fdddc38932905bdc77c990d9_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/84782520ef588d8853c297bf60_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/7198e69630bd21ac0236ecd2eb_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/51c18970695cc866eb49ac994a_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/9423afc568b8b8a6ea997c1d59_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/7dd1047b277d32e86793ab28db_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/6fab8ab92db2386f1c9487c997_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8750dadd4a699d4221d697bf03_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/94867da88c159ab97eef2872e8_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b26f0c2126be20a447922ea933_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a7a23c88fad861e6466faba633_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a05e542bd3573127e9b747f64b_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b04a8a4d880a6ab0d69f34e52c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ade3a46a7052335afea8c1167c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ac31b2c1922050f1396103ab8c_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ce697fc3b324cb6152c4d7223d_raw.auxcpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/c14508fe5e82de48992ac2db7f_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b5aff32438324d916e16bc28a1_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bf3620c19bd38fa8bd73260c95_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp 
|53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b46af68c4a367075c07498736c_raw.auxcpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/initializer.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/common.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/kesus/grpc_service.cpp |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/http_request.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/util.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/control_plane_storage_counters.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/request_validators.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/probes.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/statistics/database/libcore-statistics-database.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/config.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/restore.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/object.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/common.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/modification.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/object.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/abstract.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/fetcher.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/processor/schema.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_access.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/initialization.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter_impl.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/formatters_common.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/create_view_formatter.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_gen.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_counters.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_seat.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bridge.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_counters.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_tx_env.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_create.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_apply.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/manager.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_comp_gen.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_broker.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_scheme.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/generic_manager.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_compaction_logic.h_serialized.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_database.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_secret.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.cc |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/snapshot.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_borrowlogic.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/initializer.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_snapshot.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bsc.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_row_eggs.h_serialized.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausage_meta.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_fwd_misc.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_index_iter_create.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_page_iface.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_page_label.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_range.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_create.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_overlay.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_mem_warm.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_part_loader.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_dump.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_outset.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/migrate.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/probes.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_slice.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_scan_iface.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_store_hotdog.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/flat_stat_part_group_iter_create.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausagecache.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index_histogram.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_committed.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_observer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_misc.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_s3fifo.h_serialized.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_events.h_serialized.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_table.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/clusters_from_connections.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_health_check.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_result_write.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_get.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/table_bindings_from_bindings.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/result_writer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/bscontroller/grouper.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/node_report.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/error.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/exceptions_mapping.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/metrics_actor.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_manager.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter_resources.cpp |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/get_group.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/register_node.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/fetcher.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/read.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/common.cpp |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/background_controller.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/column_tables.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/splitter.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/writes_monitor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/error_collector.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common_data.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/duplicate_filtering.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/req_tracer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/scrub.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/remove.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/nodes/nodes.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/read.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/scan.h_serialized.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/common.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/logs/log.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service_impl.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/shred.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/update_limit_actor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/permissions.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/composite.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/data_locks/locks/abstract.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/groups.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/users.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_config/control_plane_config.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/db_counters.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/writer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/group_members.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/rate_limiter_control_plane_service.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/database.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |54.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/owners.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/libydb-services-ydb.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |54.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/tablets/tablets.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.global.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/discovery_actor.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |54.7%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_service.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |54.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/build/libyt-yt-build.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_context.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/auth_factory.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_initialize.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_write.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/persistent_queue.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/show_create.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/hive/timestamp_map.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__init.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/grpc_service.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_tablet.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chunk_client/data_statistics.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_state.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |55.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/core/https/libyt-core-https.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/result.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_service.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/state_server_interface.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/execute_queue.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |55.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/quota_tracker.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_req.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/sessions/sessions.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/third_party/utf8_range/libprotobuf-third_party-utf8_range.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |55.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/acl.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/cloud_enums.h_serialized.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/lwtrace_probes.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/queue_id.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/ymq/base/probes.cpp |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/query_id.h_serialized.cpp |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |55.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/autopartitioning_manager.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |56.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/version/libversion_definition.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_debug.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/libydb-core-audit.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |56.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/vdisk_priorities.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_vdiskid.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/out/libcore-protos-out.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_syncstate.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/actor_activity_names.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/html.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/libydb-core-base.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/backtrace.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/domain.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/base/logoblob.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/storage_pools.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/services_assert.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/pool_stats_collector.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/row_version.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/event_filter.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/feature_flags_service.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/group_stat.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/memory_controller_iface.h_serialized.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/local_user_token.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_sequenceshard.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/subdomain.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_tablet.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_cms.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_long_tx_service.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/libversion.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet_status_checker.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/traceid.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tx_processing.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/counters.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/path.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |55.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/put_status.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_operation.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control_internals.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp 
|56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/request_discriminator.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/kmeans_clusters.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/fulltext.cpp |56.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |56.4%| 
[CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_export.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_query_data.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/rate_limiter/quoter_service/quoter_service.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_dummy.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scheme.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/executer_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/statestorage.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/table_index.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/yql_translation_settings.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_log_impl.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_testshard.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/account_read_quoter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/write_quoter.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_replica.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/wilson_tracing_control.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |56.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/action.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/common.h_serialized.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tools/visualize_portions/libpy3visualize_portions.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a 
|56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/counters.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/libydb-core-tx.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/gc.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |56.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/message_seqno.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/bridge.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/appdata.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy_schemereq.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/version.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cpuinfo.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_proxy.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/blob.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/scalars.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/volume.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/tablet_id.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/common/portion.h_serialized.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/snapshot.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/limits.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_replica.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/reverse_accessor.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/common/portion.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/time_counters.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/locks_db.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard.pb.cc |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_config/audit_config.cpp |57.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/program/libcore-tx-program.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/libydb-core-public_http.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/registry.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/grpc_request_context_wrapper.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/program.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_publish.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/builder.cpp |57.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/backoff.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/events_writer.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_state_storage.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/local_discovery/grpc_service.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a 
|57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.pb.cc |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/read_table_impl.h_serialized.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/global.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/mon.cpp |57.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/identifier.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/run_query.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/s3_uri.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/tablet_killer.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/read_quoter.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/service/service.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_lookup.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/object.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_monitoring.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/initializer.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |57.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.grpc.pb.cc |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/protos/out/out.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/aws.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/config.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/pq_database.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/schema.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/version/version_definition.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/common/path_id.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ycsb/info_collector.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_guardian.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/config_examples.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/manager.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_event_filter.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |57.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/write_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/queue_attributes.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_checkpoint_storage.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/write_id.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/percentile.h_serialized.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/tx_proxy/upload_rows_counters.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/defs.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/background_controller.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/archive.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/memory_tracker.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/aggregated_result.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/long_timer.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/libydb-core-viewer.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/name_service_client_protocol.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/read_http_reply_protocol.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/locks/locks.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/text.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_aggregate.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_merge.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_filter.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_service.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/object.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/describe.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/interconnect_load.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_req.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/kqp.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/table_writer.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/program/resolver.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hulloptlsn.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/utils.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |57.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/group_write.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_read.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogformat.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_write.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/common.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogneighbors.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |57.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend_build.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/keyvalue_write.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/vdisk_write.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/agent.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/service_actor.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/libcore-config-init.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/discovery/libydb-core-discovery.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/libydb-core-control.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blocks.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/libcore-cms-console.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/read.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |57.6%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/metrics.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/s3.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/proxy.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_log.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/memory.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/request.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/comm.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/vslots.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/status.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/pdisks.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/query.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pq.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |57.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_audit.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/garbage.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/grpc_library_helper.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/tx_processor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/libcore-client-server.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/util/libcms-console-util.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/http_ping.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_operation.cpp |57.7%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_configdummy.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/ic_nodes_cache_service.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_topic_data.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_browse.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/groups.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_wb_req.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_pipe_req.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_query.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/database_resolver_mock.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/minikql/minikql_engine_host.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/defaults.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/checkpoint_id_generator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/common.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/pending_checkpoint.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/core_validators.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/base/libpublic-lib-base.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/libydb-core-health_check.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/dummy.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_config.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_request.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/discovery/discovery.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/compilation/events.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/compilation/result.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__init_scheme.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/config_helpers.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__create_tenant.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_provider.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_noop.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/http.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__set_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_manager.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__configure.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_http_server.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_handshake.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__load_state.cpp 
|57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_types.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/logger.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util/config_index.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_console.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_export.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpointing/checkpoint_coordinator.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_server.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_helper.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_stat.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_uncertain.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_log.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |57.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/mon_main.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_dummy.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_config_get.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/garbage_collection.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_impl.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/schema.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/events.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_runtime.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_html.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/probes.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/cli_utils/melancholic_gopher.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_result.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/db_key_resolver.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/base/msgbus.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_context.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/operation_helpers.cpp |57.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cms.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_db.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_sessions_describe.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_update.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_attach.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_describe.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_delete.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_init_schema.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_add.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/events.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_release.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_acquire.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_update.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_describe.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_delete.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_create.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_detach.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_destroy.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |57.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/backends.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_databases_cache.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/cms_grpc_client_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/ydbcp_grpc_client_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/database_monitoring.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/monitoring_grpc_client_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_ping.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/monitoring_rest_client_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_login.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/table_settings.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/request/common.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_config.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_logins.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_replication.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_gc.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_mon.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/grpc_services/rpc_list_operations.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blob_depot.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_write.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_delete.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_scan.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_init_schema.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_view.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/assimilator.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_resolve.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_load.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_backup.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_trash.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/coro_tx.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blocks.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_decommit.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_upload.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/libydb-core-cms.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/testing.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/system/libcpp-yt-system.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_load.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_apply_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/audit_log.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_database_control_plane_service.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_init.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/space_monitor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/shard_impl.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/services.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/services.h_serialized.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/balancer.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_startup.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_processor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_join_helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/boot_queue.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/config.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/libydb-core-quoter.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_info.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/debug_info.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/probes.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/domain_info.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/quota_manager/quota_proxy.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/drain.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_log.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/events/workload_service.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__status.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/script_executions_utils/kqp_script_execution_compression.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/monitoring.cpp |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_statics.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/fill.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_hugeblobctx.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_task.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_handle_class.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/tree/dynamic.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/kafka.h_serialized.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_group_info.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/read.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_records.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages_int.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_domains.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/node_info.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__set_down.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor_sql.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_state_name_to_int.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_balancer.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/address.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/tree/snapshot.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_protocol.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_actor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/hive/tx__start_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/events/query.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histograms.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histogram_latency.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_performance_params.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/events/events.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__register_node.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages.cpp |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/script_executions_utils/kqp_script_execution_retries.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/node_checkers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/health/health.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_config.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/events/script_executions.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/topics/kqp_topics.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_hooks.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_load_state.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/info_collector.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/scan.cpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/transfer/libydb-core-transfer.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/api_adapters.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/http.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/cms_tx_remove_task.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/erasure_checkers.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/librun.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/logger.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_api_handler.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/slot_indexes_pool.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/ut/ydb-core-blobstorage-crypto-ut |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/init/init.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/d78d0f74a3f72be1016c0cf8cf_raw.auxcpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/context.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_stop_page_nebius.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_start_page_nebius.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_client.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/cracked_page.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_cleanup_page.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_whoami.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_final.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_handler.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_nebius.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_nebius.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_settings.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_manager.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_handler.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_yandex.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_yandex.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_95b3eecc97c453f0c55c456659.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp 
|58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/ut/fulltext_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/appdata.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/quota_manager/quota_manager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/data_plane_helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a >> TBlobStorageCrypto::TestOffsetStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher [GOOD] >> TBlobStorageCrypto::TestMixedStreamCypher [GOOD] >> TBlobStorageCrypto::UnalignedTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher >> TBlobStorageCrypto::PerfTestStreamCypher [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/runtime.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/mvp/core/libydb-mvp-core.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/string_udf.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/cache_policy.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mapper.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/reducer.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/parser.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/filter.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_mem_profiler.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/appdata.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/merger.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |58.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_de67ee476035f2cc7c8d34c996.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_c96c333b4f7fc5cb2b98b27907.o |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/get_value.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_input.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/services.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_output.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libpng/libcontrib-libs-libpng.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc.cpp >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libtiff/libcontrib-libs-libtiff.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libjpeg-turbo/libcontrib-libs-libjpeg-turbo.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/column_table.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/scheme.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/kqp_physical.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/index_builder.grpc.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/transfer_writer.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/driver_lib/run/config_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |58.3%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/row_table.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tools/visualize_portions/visualize_portions |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/qhull/libcontrib-libs-qhull.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |58.4%| 
[CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5a2f230528097042fdaf726fed_raw.auxcpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_b031a661ba244dffa03ab0c7ec.o |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cppy/libpy3contrib-python-cppy.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cycler/py3/libpy3python-cycler-py3.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/factories.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc 
|58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/fonttools/libpy3contrib-python-fonttools.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/agg24-svn/libpy3-extern-agg24-svn.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/ttconv/libpy3py3-extern-ttconv.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__load_state.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/durationpy/libpy3contrib-python-durationpy.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/sharpyuv/liblibs-libwebp-sharpyuv.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openjpeg/libcontrib-libs-openjpeg.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lcms2/libcontrib-libs-lcms2.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/freetype/libcontrib-libs-freetype.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/libcontrib-libs-libwebp.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_local.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/olefile/py3/libpy3python-olefile-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/unicode_udf.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |58.6%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/reconfig_state_storage_workload |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.grpc.pb.cc |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/digest_udf.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.pb.cc |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/main.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/math_udf.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest-timeout/py3/libpy3python-pytest-timeout-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/datetime2_udf.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard.pb.cc |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/objcopy_940b9a794cb8fbc6ebdf926276.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |58.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/local.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_parser.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_d80f811b3fe32bcd2128d6ab6f.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_c0b503af0486d120ebabb4c64b.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a >> TBlobStorageCryptoRope::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_4b2ec656f7e85bc05586d7e6fc.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a >> TBlobStorageCryptoRope::TestInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::PerfTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::UnalignedTestStreamCypher [GOOD] >> TChaCha::KeystreamTest1 [GOOD] >> TChaCha::KeystreamTest2 [GOOD] >> TChaCha::KeystreamTest3 [GOOD] >> TChaCha::KeystreamTest4 [GOOD] >> TChaCha::KeystreamTest5 [GOOD] >> TChaCha::KeystreamTest6 [GOOD] >> 
TChaCha::KeystreamTest7 [GOOD] >> TChaCha::KeystreamTest8 [GOOD] >> TChaCha::MultiEncipherOneDecipher [GOOD] >> TChaCha::SecondBlock [GOOD] >> TChaCha512::KeystreamTest1 [GOOD] >> TChaCha512::KeystreamTest2 [GOOD] >> TChaCha512::KeystreamTest3 [GOOD] >> TChaCha512::KeystreamTest4 [GOOD] >> TChaCha512::KeystreamTest5 [GOOD] >> TChaCha512::KeystreamTest6 [GOOD] >> TChaCha512::KeystreamTest7 [GOOD] >> TChaCha512::KeystreamTest8 [GOOD] >> TChaCha512::MultiEncipherOneDecipher [GOOD] >> TChaCha512::SecondBlock [GOOD] >> TChaCha512::CompatibilityTest |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_pool.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o >> TChaCha512::CompatibilityTest [GOOD] >> TChaChaVec::KeystreamTest1 [GOOD] >> TChaChaVec::KeystreamTest2 [GOOD] >> TChaChaVec::KeystreamTest3 [GOOD] >> TChaChaVec::KeystreamTest4 [GOOD] >> TChaChaVec::KeystreamTest5 [GOOD] >> TChaChaVec::KeystreamTest6 [GOOD] >> TChaChaVec::KeystreamTest7 [GOOD] >> TChaChaVec::KeystreamTest8 [GOOD] >> TChaChaVec::MultiEncipherOneDecipher [GOOD] >> TChaChaVec::SecondBlock [GOOD] >> TChaChaVec::CompatibilityTest |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_077abccc5552b4ff2e53b07653.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_be727953c626d90e9f80dacc0b.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/objcopy_e740d8bfaebae830aaeb4ace59.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_cee1e02beaf827051149b5ca30.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o >> TChaChaVec::CompatibilityTest [GOOD] >> TPoly1305::TestVector1 [GOOD] >> TPoly1305::TestVector2 [GOOD] >> TPoly1305::TestVector3 [GOOD] >> TPoly1305::TestVector4 [GOOD] >> TPoly1305Vec::TestVector1 [GOOD] >> TPoly1305Vec::TestVector2 [GOOD] >> TPoly1305Vec::TestVector3 [GOOD] >> TPoly1305Vec::TestVector4 [GOOD] >> TTest_t1ha::TestZeroInputHashIsNotZero [GOOD] >> 
TTest_t1ha::PerfTest [GOOD] >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/common.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_287a0728f8b1ad204ac0396eb2.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ce0222bab1634be9f9a52f715d.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ec94bbf9004678001f4c8195e3.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/mvp.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_da2669c2228a88c83cd32d45da.o |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/crypto/ut/unittest >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_899316667b8914fe8ec3af85d9.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_cf5836766ac30ca7ea957ce368.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_daba02a22b66dd174e40603586.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydb.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/protos/grpc.grpc.pb.cc |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/cluster_ordering-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_d68e1e5b762e412afe6a534487.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_e2cd022168ff179d1441f5d3df.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_c9ab749ab3188a8582c5cefa5e.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_0ade7a5662c6292edc3a8de02f.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydbc.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_base_init.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/apps/etcd_proxy/service/etcd_shared.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_f7a8cf9a03ef0c1cd4532da3e4.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_294c9b1bea23962d01551cba70.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_565adec51da3cceeac787115e7.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_e317764e105a7e9e48b67a7b7e.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_951c70889c9404d1662da27090.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/aae788a890ddcb1702c659c8aa_raw.auxcpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/iceberg_ddl_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> TWeighedOrderingTest::SimpleSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql >> visualize_portions::import_test [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/tools/visualize_portions/import_test >> visualize_portions::import_test [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/ut/public-partition_index_generator-ut |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc 
|58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/re2_udf.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_7c9715e23edebba4ffb82d27d5.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_84b6c628d15e9e575bec5be5c5.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_385ba1144ebdaae0c967a41e83.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_reader.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/audit_config/ut/ydb-core-audit-audit_config-ut >> TPartitionIndexGeneratorTest::TestReservationSequenceSkip [GOOD] >> TPartitionIndexGeneratorTest::TestReserve2 [GOOD] >> TPartitionIndexGeneratorTest::TestReservation [GOOD] >> TPartitionIndexGeneratorTest::TestBasicGeneration [GOOD] >> TPartitionIndexGeneratorTest::TestSplitMergeScenario [GOOD] >> TPartitionIndexGeneratorTest::TestReserve3 [GOOD] >> TPartitionIndexGeneratorTest::TestValidationFailures [GOOD] >> TPartitionIndexGeneratorTest::TestReuseExisting [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__register_node.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/partition_index_generator/ut/unittest >> TPartitionIndexGeneratorTest::TestReuseExisting [GOOD] |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_table.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_topic.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_1f78e7638ae0f2e308bd7331f9.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_7327b174f210974511ac3b7e78.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_f4efacd00293c5fe09c3f84a62.o |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator/ydb-library-yaml_config-validator-ut-validator |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_c8d4a875437c150867560bba8f.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_369973dc15e2616dc6c7b37944.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |58.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/streaming/objcopy_9001a43ebb2f39da4516c33deb.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_2677da7b1d6364e10956f27105.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_988cc467d4da79de606ebf50ee.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/workload/libpy3stress-reconfig_state_storage_workload-workload.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/ctas |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_4b767dce2ddf7a5424aef828d6.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_19422d2b60428207055b4ed843.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |58.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/olap/ttl_tiering/objcopy_bd8a6d25e26a719f80141d0711.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/json2_udf.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_70be8d5dc43dbc1df67ecd59c9.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_abdf75b6b1f064446bfb0de382.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_71f52eacd4ede06f6cee6faac3.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/libpy3tests-library-compatibility.global.a |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a >> Validator::IntValidation [GOOD] >> Validator::StringValidation [GOOD] >> Validator::MultitypeNodeValidation [GOOD] >> Validator::OpaqueMaps [GOOD] >> Validator::MapValidation [GOOD] >> Validator::Enums [GOOD] >> Validator::BoolValidation [GOOD] >> Validator::IntArrayValidation [GOOD] |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/plan2svg/import_test >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |58.8%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/objcopy_8d2ea3c78a255bb4c87c2fc54a.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_589315062f5401a368910248f0.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_61613f0bd98876f149d8574891.o |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator/unittest >> Validator::IntArrayValidation [GOOD] |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_c114cbf6b820d92320c1e2c912.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta_versions.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_pool.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/audit/audit_log_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_config/audit_config_ut.cpp 
|58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_609c2613d8f9c513602350c6a8.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_e6184a39b8332c221c5cda3c2f.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/metadata/ut/functions_metadata_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_ffc5f76f7501b8251738448541.o |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/ydb-dump.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs_fixture.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/run_ydb.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-transfer-topic-to-table.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-topic.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_d1fbcd1c11b51c9211bca0302b.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_b1ff49dba71f2b5005c139ecf8.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_42a5c9a2dd69111d52bf2b4584.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_66645e8b2ff7b5afc49ced4928.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_294fbc41fa9ab595168246aac7.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_65ac58c27d43a55d0ea4eda626.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_48e09f84949dd34b82c51f21a3.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_ce63bab0f89a8715a42271a26a.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_3b212908932716bae8a8e38b2c.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_17cef60c2dd0eb7ea46181ba87.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_28f172e1aa977d907bdfa0a81b.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_04f2935f3ada8eb9d01ebaba6b.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_6af7a7ce8a1ee5e67d75a2978a.o |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_bridge_v1.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_group/main.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/credentials.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/task.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/yq_internal.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/events.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |58.9%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/yql_pg_expr_nodes.{gen.h ... defs.inl.h} |58.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/yql_generic_expr_nodes.{gen.h ... 
defs.inl.h} |58.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |58.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/yql_pq_expr_nodes.{gen.h ... defs.inl.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |58.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_1583476a2a074be936cf5a393e.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_71b7c7df3e7853e6e7cd11e484.o |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_cc203073bb2a03b31e52a78f24.o |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action_type.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/protoc >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |58.9%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... 
defs.inl.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_52d3e6a0651990fc997ab40ba2.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dc1e8788b8287c02880cfe2814.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_64bde13108f9284b2e9f0bbb7a.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_03f75cad4510fd9d018635026c.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_0c451aebc6dafbdf0d9da2ab02.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/c664ef6ca80e747b410e1da324_raw.auxcpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_a926d3332cb769ac3e6c9e6e37.o |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/import_test >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/dqs.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.{pb.h ... 
grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json/json_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/annotations.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_5f161468ff5322b803d4d0dc79.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/ut/ydb-core-audit-ut |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/events.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account_service.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.{pb.h ... grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_cms_v1.{pb.h ... 
grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |59.0%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/38dcacd12926621ca72e30ce1b_raw.auxcpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_25d3afea4b7778a202a80125cb.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/objcopy_9509442a50bd9d1393fa0d54e4.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fdd48fc620c42f480ae38b77f5.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fcc835b175560db56b04f51f44.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_9f43001a877b9e371fe700c81d.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_1339ee5ef04af3a5a49d43a6c9.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_7a185a4b35de7733fde931d298.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_dae5a42f53b4f98bf1b9fd8118.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_debug.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service_subject.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account_service.{pb.h ... 
grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/index.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/validation.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_task_params.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampler_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token.{pb.h ... grpc.pb.h} >> generator::import_test [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/reference.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... 
grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/source.pb.{h, cc} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD] |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py{ ... i} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py{ ... i} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2.py{ ... i} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... 
grpc.pb.h} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_bab46dc0e0bb01200e952d765c.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_aab724be52dad3663d415db204.o |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/objcopy_24cfda7d41447be7f781827fb8.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/labels_maintainer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_log_service_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |59.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/rpc/status.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_python/grpc_python |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/iam_token_service.{pb.h ... grpc.pb.h} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/unit/client/oauth2_token_exchange/helpers/libclient-oauth2_token_exchange-helpers.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/profile_service.{pb.h ... grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/session_service.{pb.h ... 
grpc.pb.h} |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/cloud_user.{pb.h ... grpc.pb.h} |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/claims.{pb.h ... grpc.pb.h} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/lease_holder.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/service_account.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/tenant_user_account.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/operation.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_parser/enum_parser |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_auth_v1.{pb.h ... grpc.pb.h} |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_52a4f8c9597c445f68f1c5bc5d.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_e2ba9075c2fb99d18373e3a8a1.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_afb48e06933bdee6c5245db82e.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_1dba5118ef0a485f3bf803be50.o |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/dayofweek.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/timeofday.{pb.h ... 
grpc.pb.h} |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/py3cc |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload_kv |59.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/protoc-gen-mypy |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |59.1%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/fq.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/ut/ydb-core-control-lib-ut |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... 
grpc.pb.h} |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_c7c0405528f55543f02099b70d.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_994f7d36d0c12371f6d6ec62e4.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_8dd70891bfbac8135389af5f53.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/objcopy_81ae81681ce2388a653cfa5ba3.o |58.8%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_block_hash_join_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_scalar_hash_join_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |58.8%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/immediate_control_board_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_44fac4fe441507735704a000ad.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8fca143a218b930f297b779e3a.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |58.9%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_9314464e3560b2511ac931acd9.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |58.9%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.9%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8db6616d40f8020d0632222fe3.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/third_party/utf8_range/libprotobuf-third_party-utf8_range.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_359d47616c1036f0865eb1e662.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/5c5fdf614c3039a8dba94a4f38_raw.auxcpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c52ec5ba5ab0b788efaa5ed704.o |58.9%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/tools/decrypt/main.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/persqueue.pb.{h, cc} |58.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/sessions.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/common.{pb.h ... 
grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_maintenance.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py{ ... i} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py{ ... i} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py{ ... i} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health__intpy3___pb2.py{ ... 
i} |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py.p5ju.yapyc3 |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py{ ... i} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2.py{ ... i} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |59.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |59.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... 
i} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/tools/decrypt/decrypt |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydbd/main.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/mvp_ut.cpp |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |59.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |58.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |58.9%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |58.9%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... 
i} |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |58.9%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_d23500649301df2a8de48ba70d.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth_ut.cpp |58.8%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/ut/ydb-core-fq-libs-signer-ut |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_subscriber_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/ut_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |58.8%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_ut.cpp |58.8%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |58.7%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |58.7%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |58.8%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |58.8%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg/libcpp-lfalloc-dbg.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/objcopy_b4ebb94deb4cea673457b77fcc.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/workload/libpy3stress-statistics_workload-workload.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp 
|58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a >> Signer::Basic [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/ut/ydb-core-fq-libs-metrics-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/signer/ut/unittest >> Signer::Basic [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/node_broker |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/table_writer_ut.cpp >> Metrics::SeveralSubItems [GOOD] >> Metrics::SeveralTopItems [GOOD] >> Metrics::EmptyIssuesList [GOOD] >> SanitizeLable::SkipSingleBadSymbol [GOOD] >> Metrics::OnlyOneItem [GOOD] >> SanitizeLable::Empty [GOOD] >> Metrics::CombineSubItems [GOOD] >> SanitizeLable::SkipBadSymbols [GOOD] >> Metrics::MoreThanFiveItems [GOOD] >> SanitizeLable::Truncate200 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |58.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> SanitizeLable::Truncate200 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/ut_helpers.cpp |58.6%| [PR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_object_storage_v1.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/show_create_view |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tenants_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_tablet_v1.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/locks_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/cancel_tx_ut.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/bsconfig_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |58.6%| [PB] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/object_storage_listing_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/flat_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... 
grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/data.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py{ ... i} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast__intpy3___pb2.py{ ... i} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard__intpy3___pb2.py{ ... 
i} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/counters_shard.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup_service.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/access.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/ut/etcd_service_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_exchange_service.{pb.h ... grpc.pb.h} |58.6%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/statistics_workload |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_service.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2.py{ ... i} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2.py.p5ju.yapyc3 |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2.py{ ... i} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log__intpy3___pb2.py{ ... i} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/ymq.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... 
grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |58.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/yql_s3_expr_nodes.{gen.h ... defs.inl.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_persqueue_v1.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |58.6%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py{ ... i} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |58.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... 
grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |58.6%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/s3_backups_workload |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |58.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/cfg |58.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/oltp_workload |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |58.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/olap_workload |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |58.7%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/data.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |58.7%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py.p5ju.yapyc3 |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |58.7%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/config.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |58.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... 
defs.inl.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/topic_data_ut.cpp |58.6%| [PR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/include/llvm/IR/Attributes.inc{, .d} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access_service.{pb.h ... grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/selector.pb.{h, cc} |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/operation_id.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... 
grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |58.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |58.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/node_broker_workload |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_bca798fd9907b940d5669f2f7c.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_e56dca5acd30eee0c83b37e424.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_stack_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_inplace_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/records.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... 
grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.{pb.h ... grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/options.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... 
grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/sensitive.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_debug_v1.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_maintenance_v1.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/retry_options.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |58.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/tx_event.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/service/ut/ydb-apps-etcd_proxy-service-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp 
|58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/main.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug_tools/ut/ydb-core-debug_tools-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |58.6%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/service_node/main.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type_service.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scripting_v1.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/resource.{pb.h ... 
grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_monitoring_v1.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/quota_internal.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_operation_v1.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/initiator.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> OperationLog::Size29 [GOOD] >> OperationLog::Size1 [GOOD] >> OperationLog::Size8 [GOOD] >> OperationLog::Size1000 |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_rate_limiter.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/fq_private_v1.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/events.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... 
grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_2a98e2f0e66f286cb125620511.o |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_8cba80b2275265b72407436cdf.o |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2.py{ ... i} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2.py{ ... i} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp >> OperationLog::Size1000 [GOOD] >> OperationLog::ConcurrentWrites >> OperationLog::ConcurrentWrites [GOOD] |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v1.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/fq_v1.{pb.h ... 
grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset_service.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/debug_tools/ut/unittest >> OperationLog::ConcurrentWrites [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/result_set_meta.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/rpc.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/http_client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |58.6%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... 
grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |58.6%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/dq_solomon_shard.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... 
grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |58.6%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/ctas_workload |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/resource.{pb.h ... 
grpc.pb.h} |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/objcopy_3fdb568d483b57acc8e627f8c2.o |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |58.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |58.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |58.6%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper__intpy3___pb2.py{ ... i} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/issue_id.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup__intpy3___pb2.py{ ... i} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/auth.{pb.h ... grpc.pb.h} |58.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.h |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/graph.pb.{h, cc} |58.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/metrics_queue.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp__intpy3___pb2.py{ ... i} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/kqp__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical__intpy3___pb2.py.p5ju.yapyc3 |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical__intpy3___pb2.py{ ... i} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key__intpy3___pb2.py{ ... i} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |58.6%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py{ ... i} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... 
i} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/ut/ydb-core-config-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/utils/libcore-config-utils.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud_service.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... grpc.pb.h} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... 
grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py.siec.yapyc3 |58.6%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/ut/ydb-core-fq-libs-hmac-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |58.5%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp__intpy3___pb2.py{ ... i} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |58.5%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... 
i} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/ut/main.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/blobs.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_message.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/blobsan/main.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |58.4%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/links.pb.{h, cc} |58.5%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp__intpy3___pb2.py.p5ju.yapyc3 |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... 
defs.inl.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_large.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} >> HmacSha::HmacSha1 [GOOD] |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/events.pb.{h, cc} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py{, i} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/4399546af28cb40e5d74ea4a4b_raw.auxcpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |58.5%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49e9948af399bc60603a7d2db5.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/hmac/ut/unittest >> HmacSha::HmacSha1 [GOOD] |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/oauth_request.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/graph_params.pb.{h, cc} |58.5%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/http.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... 
grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_kqp.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_counters.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_labeled.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |58.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/scheme/csv_arrow_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/operation.{pb.h ... grpc.pb.h} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/blob_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/type_codecs_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |58.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.h |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut_ycsb.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_b701dac104d6ebd83e6489821f.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_9be2dadc45d1a9fdc157172661.o |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/objcopy_73ddf87b96fcbfc4f715436dc4.o |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/libpy3oltp_workload.global.a |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/objcopy_bcf2142e31bf537964dc063d11.o |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/access_service.{pb.h ... 
grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_tablet.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |58.1%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/topic_workload |58.1%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/annotations.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/cursor.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/encryption_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/metadata.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/metadata_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |58.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_logstore_v1.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/logger_config.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |58.2%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/kv.h_serialized.{cpp, h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_query_v1.{pb.h ... 
grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/events.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py{ ... i} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_view_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_keyvalue.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_bridge.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... 
grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_export_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/console_service.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/ydb-library-yaml_config-static_validator-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/task_command_executor.pb.{h, cc} |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/nemesis |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/quota.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |58.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/yql_yt_expr_nodes.{gen.h ... 
defs.inl.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_backup_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_coordination_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_table_v1.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tstool/tstool |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/objcopy_6077c98b9810fee0e2250a36a4.o |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp >> StaticValidator::HostConfigs [GOOD] >> StaticValidator::Hosts [GOOD] >> StaticValidator::DomainsConfig [GOOD] |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/events.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/unittest >> StaticValidator::DomainsConfig [GOOD] |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |58.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... 
defs.inl.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder_service.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder.{pb.h ... grpc.pb.h} |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scheme_v1.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tsserver/tsserver |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |58.2%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/simple_queue |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/tsserver/main.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_388676493f4fc142dc0926df96.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/4342cd9f302f261f8b1a8137d8_raw.auxcpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_599af7074d669a6697054e1001.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_bb95af667e01d0dbfb707dfb90.o |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |58.2%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_5accfe00d45fb7ebcc30e116b2.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_b783a1a2aacb855daa1e55fad6.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_93665db601a12d4842de4565e2.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/objcopy_efd352795aee39d7ac6e163a2d.o |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... 
grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2.py.p5ju.yapyc3 |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database_service.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_rate_limiter_v1.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/error.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/pgproxy.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type.{pb.h ... 
grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py.p5ju.yapyc3 |58.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/graph_description.pb.{h, cc} |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |58.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py.p5ju.yapyc3 |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2.py{ ... i} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.{pb.h ... grpc.pb.h} |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.{pb.h ... 
grpc.pb.h} |58.2%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/topic_kafka_workload |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2.py.p5ju.yapyc3 |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/partcheck/main.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/mvp.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_configs.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/partcheck/partcheck |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |58.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |58.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_compiler.cpp |58.1%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/kafka_workload |58.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_b5b36403e069f48d06f8367722.o |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_replay.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_342e8590e41686b18307d054a9.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_c693478edc1220e7a9143567d1.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |58.1%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_2aa1916d45dca98014edb3d732.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/proxy.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_proccessor.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |58.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py{ ... i} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/data.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/edaf602b2011baa1519a223d63_raw.auxcpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1326afc143d720f2af434cd836.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/main.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/storage_meta.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/user_account.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_auth.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/streaming_service.{pb.h ... grpc.pb.h} |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... 
grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |58.2%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/stock.h_serialized.{cpp, h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/storage.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_clickhouse_internal_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_replication_v1.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_ymq_v1.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |58.1%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/quota_service.{pb.h ... 
grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |58.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/sink.pb.{h, cc} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_config_v1.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/SQLv1Parser.pb.{code0.cc ... 
main.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_dynamic_config_v1.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/flat_table_part.pb.{h, cc} |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/transitional/folder_service.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/large/ydb-tests-olap-s3_import-large |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ymq.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/example_configs/static_validator-ut-example_configs |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.{pb.h ... grpc.pb.h} |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scripting.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_7e6470c67310c26b57384706e4.o |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_824785b12dcc08862746468e4b.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_89b3e69f7cdba68b4eefcae48c.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_11e4572b38d275456acaf6e9ab.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |58.1%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/kv.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py{ ... i} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... 
grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/access_service.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2.py{ ... i} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} >> StaticConfigExamples::BLOCK42 [GOOD] >> StaticConfigExamples::SINGLE_NODE_IN_MEMORY [GOOD] >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES_IN_MEMORY [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES [GOOD] >> StaticConfigExamples::SingleNodeWithFile [GOOD] |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2.py{ ... i} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_logstore.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_import_v1.{pb.h ... 
grpc.pb.h} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_595664246e80606efdfb128414.o |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_9f57718fc6e3428041ec840656.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_b879aaabc7ba23dfa06152110a.o |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/example_configs/unittest >> StaticConfigExamples::SingleNodeWithFile [GOOD] |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/persqueue.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.{pb.h ... 
grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_2194854d9f8cbb3e0ba798b861.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_52e86d5ee8fadefdbb415ca379.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/unittests.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.{pb.h ... grpc.pb.h} |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_b866963286293af0b6f2139fed.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_f9b0feecd0e36f08cbf5c53562.o |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/cache/cache_eviction_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py{ ... i} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |58.0%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/api.{pb.h ... grpc.pb.h} |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |58.0%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |58.1%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_16842d72ae0dac1856818f841e.o |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |58.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... 
defs.inl.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py.p5ju.yapyc3 |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py{ ... i} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |58.0%| [PB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |58.1%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |58.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/cache/ut/ydb-core-persqueue-pqtablet-cache-ut |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_716263ce181e67161f84180281.o |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/partition_key_range/partition_key_range_sequence_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |58.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.0%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/transfer_workload |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/ut/graph_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |57.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |57.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_6b62c1db41e3ebd0278a84dced.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp >> ydb-tests-olap-s3_import-large::import_test [GOOD] |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |57.8%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/large/import_test >> ydb-tests-olap-s3_import-large::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_b83d9052e0bc89877bbe223294.o |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/ut/olap/json_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/annotations.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/yandex_passport_cookie.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2_grpc.py.p5ju.yapyc3 |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/fq_private.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.{pb.h ... 
grpc.pb.h} |57.8%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |57.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus__intpy3___pb2.py{ ... i} |57.8%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |57.8%| [PD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2.py{ ... i} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |57.9%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |57.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |57.8%| [PD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |57.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |57.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |57.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |57.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_login_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_ut.cpp >> ydb-library-benchmarks-runner::import_test [GOOD] |57.8%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |57.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |57.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |57.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py.p5ju.yapyc3 |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... 
grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_federation_discovery_v1.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |59.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/kmeans_clusters.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |60.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/fulltext.cpp |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |60.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/simple/helpers.cpp |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/ut/fulltext_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |61.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |61.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |61.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/simple/query_id.cpp |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |62.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |62.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |62.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |62.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/simple/helpers.cpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |62.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/simple/query_id.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/fulltext.cpp |62.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |62.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |62.4%| 
[CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |62.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |62.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |62.4%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/kmeans_clusters.cpp |62.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/aa848536d47c49c5b2820aeee4_raw.auxcpp |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |62.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/ut/fulltext_ut.cpp |62.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |63.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |63.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |63.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/audit_config/audit_config.cpp |63.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |63.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |63.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp__intpy3___pb2_grpc.py.p5ju.yapyc3 |63.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp__intpy3___pb2.py.p5ju.yapyc3 |63.7%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/a48cd84b630c61b8d19c887979_raw.auxcpp |63.7%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |63.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |63.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |63.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |63.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/037dadb082c3788ff2d8ca830f_raw.auxcpp |63.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py{ ... i} |63.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |63.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |64.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |64.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |64.3%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |64.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/383ce71fd9fa04eb3230fc8f2e_raw.auxcpp |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/968732828ff205567f6707c2fe_raw.auxcpp |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c90d9fb739ea008c06169ff153_raw.auxcpp |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/0d9faae2dd392530096b141b6c_raw.auxcpp |64.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |64.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |64.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |64.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.3%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |64.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/356aa18c71c00c1ebe811b0407_raw.auxcpp |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp__intpy3___pb2.py{ ... i} |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |64.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |64.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |64.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... 
i} |64.5%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/3eb880cc21ffc3fc10ee677b0c_raw.auxcpp |64.5%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e4a588c704e4418873ed6891de_raw.auxcpp |64.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |64.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |64.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |65.2%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |65.3%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_config/audit_config.cpp |65.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |65.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |65.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |65.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/device_perf_test.{pb.h ... grpc.pb.h} |66.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |66.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |66.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |66.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |66.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |66.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |66.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |67.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |67.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |67.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/bridge.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/events/query.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/bridge.cpp |68.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |68.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |68.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |68.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload_mixed |68.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |68.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |69.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |69.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/cms_ut.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/library/yaml_config/protos/config.pb.cc |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/version/version.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/events/query.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/version/libversion.a |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |70.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |70.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |70.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |71.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |71.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |71.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |71.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.cc |71.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |71.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/audit_logins.cpp |71.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/service/service.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/gc.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/version.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/gc.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |72.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |72.2%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |72.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |72.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |72.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/service/service.cpp |72.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_proxy.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/events/events.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_proxy.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/events/events.cpp |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |72.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/audit_log.cpp 
|72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |72.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |72.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_8f964d45a9735415d4aae7946d.o |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |72.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |72.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |72.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_state_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |72.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/json_change_record.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/table_writer.cpp |72.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/replication/service/json_change_record.cpp |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/table_writer.cpp |72.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/usage/config.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/config.cpp |72.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |72.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |72.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |72.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |72.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/table_index.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/table_index.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |72.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/binaries/downloader/libpy3compatibility-binaries-downloader.global.a |72.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/wilson_tracing_control.cpp |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/wilson_tracing_control.cpp |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/cdc |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_calls.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/usage/service.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/service.cpp |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |71.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |70.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |70.9%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |70.7%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/library/compatibility/binaries/downloader/downloader |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/audit_log_impl.cpp |70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |70.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/libydb-core-audit.a |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/config.cpp |70.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/config.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_log_impl.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |67.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.global.a |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |67.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: 
symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'fwrite' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined
ld.lld: warning: version script assignment
of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'longjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_detach' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'puts' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'siglongjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent_r' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment 
of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined
ld.lld: warning: version script assignment of 'global' failed (symbol not defined) for each of the following symbols:
lgamma, lgammaf, lgammaf_r, lgammal, lgammal_r, lgamma_r, lgetxattr, listxattr, llistxattr, localtime, localtime_r, lrand48_r, __lxstat, __lxstat64, mallinfo, malloc_stats, mallopt, mbrtowc, mbsnrtowcs, mbsrtowcs, mbstowcs, mbtowc, memccpy, memchr, memcmp, memcpy, memmem, memmove, mempcpy, memrchr, memset, mincore, mktime, mlock, mlockall, mmap, mmap64, modf, modff, modfl, munlock, munlockall, _obstack_begin, _obstack_begin_1, _obstack_newchunk, opendir, open_memstream, openpty, open_wmemstream, __overflow, pipe, pipe2, poll, ppoll, prctl, pread, pread64, preadv, preadv64, printf, prlimit, prlimit64, process_vm_readv, process_vm_writev, pthread_attr_getaffinity_np, pthread_attr_getdetachstate, pthread_attr_getguardsize, pthread_attr_getinheritsched, pthread_attr_getschedparam, pthread_attr_getschedpolicy, pthread_attr_getscope, pthread_attr_getstack, pthread_attr_getstacksize, pthread_barrierattr_getpshared, pthread_condattr_getclock, pthread_condattr_getpshared, pthread_create, pthread_getschedparam, pthread_join, pthread_key_create, pthread_mutexattr_getprioceiling, pthread_mutexattr_getprotocol, pthread_mutexattr_getpshared, pthread_mutexattr_getrobust, pthread_mutexattr_getrobust_np, pthread_mutexattr_gettype, pthread_mutex_lock, pthread_mutex_unlock, pthread_rwlockattr_getkind_np, pthread_rwlockattr_getpshared, pthread_setcancelstate, pthread_setcanceltype, pthread_setname_np, ptrace, putenv, pvalloc, pwrite, pwrite64, pwritev, pwritev64, random_r, rand_r, read, readdir, readdir64, readdir64_r, readdir_r, readlink, readv, realpath, recv, recvfrom, recvmsg, remquo, remquof, remquol, scandir, scandir64, scanf, sched_getaffinity, sched_getparam, sem_destroy, sem_getvalue, sem_init, sem_post, sem_timedwait, sem_trywait, sem_wait, send, sendmsg, sendto, setenv, setgrent, setitimer, setlocale, setpwent, shmat, shmctl, sigaction, sigemptyset, sigfillset, signal, sigpending, sigprocmask, sigtimedwait, sigwait, sigwaitinfo, sincos, sincosf, sincosl, snprintf, socketpair, sprintf, sscanf, statfs, statfs64, statvfs, statvfs64, stpcpy, strcasecmp, strcasestr, strcat, strchr, strchrnul, strcmp, strcpy, strcspn, strdup, __strdup, strerror, strerror_r, strftime, __strftime_l, strftime_l, strlen, strncasecmp, strncat, strncmp, strncpy, strndup, __strndup, strnlen, strpbrk, strptime, strrchr, strspn, strstr, strtod, __strtod_internal, __strtod_l, strtod_l, strtof, __strtof_internal, __strtof_l, strtof_l, strtoimax, strtol, strtold, __strtold_internal, __strtold_l, strtold_l, __strtol_internal, strtoll, __strtol_l, strtol_l, __strtoll_internal, __strtoll_l, strtoll_l, strtoul, __strtoul_internal, strtoull, __strtoul_l, strtoul_l, __strtoull_internal, __strtoull_l, strtoull_l, strtoumax, strxfrm, strxfrm_l, swprintf, sysinfo, tcgetattr, tempnam, textdomain, time, timerfd_gettime, timerfd_settime, times, __tls_get_addr, tmpnam, tmpnam_r, tsearch, tzset, __uflow, uname, __underflow, vasprintf, vfprintf, vfscanf, vprintf, vscanf, vsnprintf, vsprintf, vsscanf, vswprintf, wait, wait3, wait4, waitid, waitpid, wcrtomb, wcschr, wcscmp, wcscpy, wcsftime, __wcsftime_l, wcsftime_l, wcslen, wcsnrtombs, wcsrtombs, wcstod, __wcstod_internal, __wcstod_l, wcstod_l, wcstof, __wcstof_internal, __wcstof_l, wcstof_l, wcstol, wcstold, __wcstold_internal, __wcstold_l, wcstold_l, __wcstol_internal, wcstoll, __wcstol_l, wcstol_l, __wcstoll_internal, __wcstoll_l, wcstoll_l, wcstombs, wcstoul, __wcstoul_internal, wcstoull, __wcstoul_l, wcstoul_l, __wcstoull_internal, __wcstoull_l, wcstoull_l, wmemcpy, wmemmove, wmempcpy, wmemset, wordexp, __woverflow, write, writev, __wuflow, __wunderflow, xdr_bool, xdr_bytes, xdr_char, xdr_double, xdr_enum, xdr_float, xdr_hyper, xdr_int, xdr_int16_t, xdr_int32_t, xdr_int64_t, xdr_int8_t, xdr_long, xdr_longlong_t, xdrmem_create, xdr_quad_t, xdr_short, xdrstdio_create, xdr_string, xdr_u_char, xdr_u_hyper, xdr_u_int, xdr_uint16_t, xdr_uint32_t, xdr_uint64_t, xdr_uint8_t, xdr_u_long, xdr_u_longlong_t, xdr_u_quad_t, xdr_u_short, __xpg_strerror_r, __xstat, __xstat64
|66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |66.1%| [CC]
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |66.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/protos/viewer.pb.cc |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_record.cpp |65.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |65.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/service/manager.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |65.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |65.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/manager.cpp |65.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |65.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/script_executions_utils/kqp_script_execution_retries.cpp |65.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/script_executions_utils/kqp_script_execution_retries.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/ut/ydb-core-resource_pools-ut |64.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |64.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |64.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |64.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |64.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_checks/yaml_config-validator-ut-validator_checks |64.1%| [LD] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut_perf/ydb-core-erasure-ut_perf |64.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |64.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/protos/viewer.pb.cc |64.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |64.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |64.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a |64.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |64.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_replica.cpp |64.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |64.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |64.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_check.cpp |64.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/events/script_executions.cpp |64.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_replica.cpp |64.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check.cpp |64.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/events/script_executions.cpp |64.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/configs/comparator/libpy3compatibility-configs-comparator.global.a |64.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_builder/yaml_config-validator-ut-validator_builder |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |63.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_data/kqp_query_data.cpp |63.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |63.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |63.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/rate_limiter/quoter_service/quoter_service.cpp |63.4%| RESOURCE $(sbr:4966407557) |63.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |63.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_query_data.cpp |63.3%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |63.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/priorities/service/service.cpp |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/transfer |63.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/tablet_killer.cpp |63.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |63.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |63.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/service.cpp |63.2%| [LD] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |63.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/tablet_killer.cpp |63.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_actor.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/scheduler/tree/snapshot.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/node_checkers.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |63.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_actor.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_actors.cpp |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/node_checkers.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/snapshot.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |63.0%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/benchmarks/runner/result_compare/result_compare |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/autopartitioning_manager.cpp |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/usage/config.cpp |62.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/autopartitioning_manager.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/slide_limiter/usage/service.cpp |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/config.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/service.cpp |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/rate_limiter/quoter_service/quoter_service.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_actors.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |62.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |62.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/ut/ydb-core-persqueue-public-codecs-ut |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_delete.cpp |62.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |62.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/configs/comparator/comparator |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/adapter.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_delete.cpp |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/topics/kqp_topics.cpp |62.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |62.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |62.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |62.8%| RESOURCE $(sbr:770480022) - 7.18 MB |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |62.8%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |62.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__configure.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc.pb.cc |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__configure.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/base/msgbus.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |62.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/base/libpublic-lib-base.a |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |62.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/login_shared_func.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/action.cpp |62.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |62.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/topics/kqp_topics.cpp |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/adapter.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/executer_actor.cpp |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/action.cpp |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_shared_func.cpp |62.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/grpc.grpc.pb.cc |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |62.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/executer_actor.cpp |62.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |62.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |62.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/status_tracker_actor.cpp |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |62.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |62.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |62.4%| [AR] {RESULT} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |62.4%| [AR] {RESULT} $(B)/ydb/core/audit/libydb-core-audit.a |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |62.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |62.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |61.9%| [AR] {RESULT} $(B)/ydb/core/driver_lib/version/libversion.a |61.9%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |61.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |61.9%| [AR] {RESULT} 
$(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/ut/ydb-core-config-tools-protobuf_plugin-ut |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/stopper_actor.cpp |61.9%| [AR] {RESULT} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |61.9%| [AR] {RESULT} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |61.9%| [AR] {RESULT} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |61.9%| [AR] {RESULT} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |61.9%| [AR] {RESULT} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |61.9%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |61.9%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |61.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |61.9%| [AR] {RESULT} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |61.9%| [AR] {RESULT} $(B)/ydb/public/lib/base/libpublic-lib-base.a |61.7%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/scheduler/tree/dynamic.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |61.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |61.7%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/quota_manager/quota_manager.cpp |61.7%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpcc/ydb-tests-functional-tpcc |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/config.cpp |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut/ydb-core-erasure-ut |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard.pb.cc |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/dynamic.cpp |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |61.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |61.4%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |61.2%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |61.1%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/resources_cleaner_actor.cpp |61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/base/msgbus.cpp |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |60.4%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |60.4%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |60.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |59.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |59.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |59.5%| COMPACTING CACHE 12.5GiB |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |59.5%| [TS] {RESULT} ydb/tests/fq/common/flake8 |59.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |59.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |59.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |59.5%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |59.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |59.5%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |59.5%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |59.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |59.5%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/unittest |59.5%| [TS] {RESULT} ydb/core/fq/libs/signer/ut/unittest |59.5%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |59.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |59.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/log_backend/log_backend.cpp |59.5%| [TS] {RESULT} ydb/core/fq/libs/metrics/ut/unittest |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |59.5%| [TS] {RESULT} 
ydb/tests/functional/minidumps/flake8 |59.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |59.5%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |59.5%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |59.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 |59.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |59.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/types.cpp |59.5%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |59.5%| [TS] {RESULT} ydb/tests/stress/kafka/tests/flake8 |59.5%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |59.5%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/types.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/finalizer_actor.cpp |59.6%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |59.6%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator/unittest |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |59.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/synchronization_service/synchronization_service.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/simple_queue |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |59.6%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |59.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_task.cpp |59.6%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |59.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |59.6%| [TS] {RESULT} ydb/tests/sql/large/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/api/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |59.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_task.cpp |59.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |59.6%| [TS] {RESULT} ydb/tests/compatibility/s3_backups/flake8 |59.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |59.6%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |59.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/quota_manager/quota_manager.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |59.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |59.6%| [TS] {RESULT} ydb/tests/stress/oltp_workload/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/tpc/medium/tpch/flake8 |59.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |59.6%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |59.6%| [TS] {RESULT} ydb/tools/tstool/flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |59.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/result_writer_actor.cpp |59.6%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |59.6%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |59.6%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |59.6%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/statistics/flake8 |59.6%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |59.7%| [TS] {RESULT} ydb/tests/stress/olap_workload/flake8 |59.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |59.7%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |59.7%| [TS] {RESULT} ydb/core/blobstorage/crypto/ut/unittest |59.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |59.7%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |59.7%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |59.7%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/show_create/view/tests/flake8 |59.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |59.7%| [TS] {RESULT} ydb/tests/olap/load/flake8 |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |59.7%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/flake8 |59.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |59.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |59.7%| [TS] {RESULT} ydb/tests/olap/s3_import/large/import_test |59.7%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |59.7%| [TS] {RESULT} ydb/tests/datashard/vector_index/large/flake8 |59.7%| [TS] {RESULT} 
ydb/library/benchmarks/runner/flake8 |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |59.7%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |59.7%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |59.7%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |59.7%| [TS] {RESULT} ydb/core/fq/libs/http_api_client/flake8 |59.7%| [TS] {RESULT} ydb/core/debug_tools/ut/unittest |59.7%| [TS] {RESULT} ydb/tests/olap/s3_import/large/flake8 |59.7%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |59.7%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/flake8 |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |59.7%| [TS] {RESULT} ydb/tests/olap/common/flake8 |59.7%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/flake8 |59.7%| [TS] {RESULT} ydb/tests/stability/tool/flake8 |59.7%| [TS] {RESULT} ydb/tests/datashard/select/flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |59.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |59.8%| [TS] {RESULT} ydb/tests/functional/bridge/flake8 |59.8%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |59.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/actors_factory.cpp |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |59.8%| [TS] {RESULT} ydb/tests/sql/flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |59.8%| [TS] {RESULT} ydb/tests/olap/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |59.8%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |59.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |59.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |59.8%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |59.8%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |59.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |59.8%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/import_test |59.8%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |59.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |59.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |59.8%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |59.8%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |59.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |59.8%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |59.8%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |59.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |59.8%| [TS] {RESULT} 
ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |59.8%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |59.9%| [TS] {RESULT} ydb/core/tx/columnshard/tools/visualize_portions/import_test |59.9%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/streaming/flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |59.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |59.9%| [TS] {RESULT} ydb/tests/datashard/partitioning/flake8 |59.9%| [TS] {RESULT} ydb/tests/stress/topic_kafka/tests/flake8 |59.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |59.9%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |59.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |59.9%| [TS] {RESULT} ydb/tests/datashard/add_column/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |59.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |59.9%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |59.9%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |59.9%| [TS] {RESULT} ydb/tests/example/flake8 |59.9%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test |59.9%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |59.9%| [TS] {RESULT} ydb/tests/stress/cdc/tests/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/flake8 |59.9%| [TS] {RESULT} ydb/core/persqueue/public/partition_index_generator/ut/unittest |59.9%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config.pb.cc |59.9%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |59.9%| [TS] {RESULT} ydb/tests/library/ut/flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |59.9%| [TS] {RESULT} ydb/tests/stress/statistics_workload/flake8 |59.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |59.9%| [TS] {RESULT} ydb/tests/stress/topic/tests/flake8 |59.9%| [TS] {RESULT} ydb/tests/functional/tpcc/flake8 |59.9%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test |59.9%| [TS] {RESULT} ydb/core/fq/libs/hmac/ut/unittest |59.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/with_direct_read/topic_direct_read_it |59.9%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/flake8 |60.0%| [TS] {RESULT} ydb/tests/library/compatibility/configs/comparator/flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/topic_it |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |60.0%| [TS] {RESULT} 
ydb/tests/datashard/ttl/flake8 |60.0%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/flake8 |60.0%| [TS] {RESULT} ydb/tests/olap/delete/flake8 |60.0%| [TS] {RESULT} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest |60.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |60.0%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |60.0%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/flake8 |60.0%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/common/clang_format |60.0%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/import_test |60.0%| [TS] {RESULT} ydb/tests/stress/ctas/tests/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/workload_log |60.0%| [TS] {RESULT} ydb/core/tx/columnshard/tools/visualize_portions/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |60.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/request/config.cpp |60.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |60.0%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test |60.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |60.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/config.cpp |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |60.0%| [TS] {RESULT} ydb/tests/compatibility/flake8 |60.1%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/kafka_streams_test |60.1%| [TS] {RESULT} ydb/tests/solomon/reading/flake8 |60.1%| [TS] {RESULT} ydb/tests/stress/s3_backups/flake8 |60.1%| [TS] {RESULT} ydb/tests/functional/security/flake8 |60.1%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/flake8 |60.1%| [TS] {RESULT} ydb/tests/stress/transfer/tests/flake8 |60.1%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |60.1%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 |60.1%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |60.1%| [TS] {RESULT} ydb/tests/compatibility/olap/flake8 |60.1%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |60.1%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |60.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |60.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |60.1%| [TS] {RESULT} 
ydb/library/yaml_config/static_validator/ut/example_configs/unittest |60.2%| [TS] {RESULT} ydb/tests/functional/config/flake8 |60.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |60.2%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |60.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |60.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |60.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |60.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |60.2%| [TS] {RESULT} ydb/apps/dstool/flake8 |60.2%| [TS] {RESULT} ydb/tests/datashard/secondary_index/flake8 |60.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |60.2%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |60.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |60.2%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |60.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |60.3%| [AR] {RESULT} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |60.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |60.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |60.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload_topic |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/xml.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/xml.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/error.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/error.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/msgbus.pb.cc |60.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/slide_limiter/usage/config.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/config.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |60.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |60.3%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |60.3%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/ydb_run_actor.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |60.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |60.3%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |60.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |60.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/general_cache/service/service.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/service.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/discovery_actor.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/discovery_actor.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/events.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/events.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/blob/blob.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/blob.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |60.4%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |60.4%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |60.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/compilation/result.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/compilation/result.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |60.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_config.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_config.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/util.cpp |60.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |60.5%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util.cpp |60.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/modifications_validator.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/helpers.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/helpers.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/http_service.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_service.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/util/config_index.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util/config_index.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/init_noop.cpp |60.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/util/libcms-console-util.a |60.5%| [AR] {RESULT} $(B)/ydb/core/cms/console/util/libcms-console-util.a |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_noop.cpp |60.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/http.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/http.cpp |60.5%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/run_query.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/run_query.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |60.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator.cpp |60.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/core_validators.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/core_validators.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/registry.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/general_cache/usage/service.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/service.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/general_cache/service/manager.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/manager.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/general_cache/usage/config.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/config.cpp |60.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |60.6%| [AR] {RESULT} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |60.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/queues/std/queries.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/std/queries.cpp |60.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |60.6%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |60.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |60.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp 
|60.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |60.6%| [AR] {RESULT} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |60.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/compilation/events.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/compilation/events.cpp |60.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |60.6%| [AR] {RESULT} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |60.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |60.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |60.7%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |60.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |60.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |60.7%| [AR] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |60.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |60.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |60.7%| [AR] {RESULT} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |60.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/dummy.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/dummy.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/replica.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |60.7%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_database_control_plane_service.cpp |60.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/control_plane/compute_database_control_plane_service.cpp |60.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |60.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |60.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |60.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/init.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/log_backend/log_backend_build.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend_build.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/common/key.cpp |60.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |60.8%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/common/key.cpp |60.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |60.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |60.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |60.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |60.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |60.8%| [AR] {RESULT} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |60.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |60.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |60.8%| [AR] {RESULT} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |60.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpointing/checkpoint_coordinator.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |60.8%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |60.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpointing/checkpoint_coordinator.cpp |60.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |60.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |60.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |60.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/logs/log.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/logs/log.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/table_writer.cpp |60.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |60.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/table_writer.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |60.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |60.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |60.9%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |60.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/usage/events.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/events.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/monitoring.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/monitoring.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/common/service.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/service.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.grpc.pb.cc |60.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |60.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/public_http/http_req.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |60.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/config.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |60.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |60.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_req.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/config.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/ydb_connector_actor.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/rate_limiter_control_plane_service.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/rate_limiter_control_plane_service.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/write_id.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/write_id.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/console_dumper.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/blob/blob_serialization.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/blob_serialization.cpp |61.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/update_limit_actor.cpp |61.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/rate_limiter/control_plane_service/update_limit_actor.cpp |61.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |61.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |61.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |61.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |61.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |61.0%| 
[AR] {RESULT} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |61.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |61.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |61.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/queue_attributes.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/queue_attributes.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console.pb.cc |61.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.pb.cc |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |61.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/ydb/initializer_actor.cpp |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor/usage/service.cpp |61.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |61.1%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/service.cpp |61.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |61.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/common/events.cpp |61.1%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |61.1%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |61.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |61.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/validators.cpp |61.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators.cpp |61.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |61.1%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/common/events.cpp |61.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/general_cache/service/counters.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_event_filter.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_event_filter.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/counters.cpp |61.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |61.2%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |61.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |61.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |61.2%| [AR] {RESULT} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |61.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/auth_config_validator.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |61.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/libcore-config-validation.a |61.2%| [AR] {RESULT} $(B)/ydb/core/config/validation/libcore-config-validation.a |61.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/simple/settings.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/simple/settings.cpp |61.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |61.2%| [AR] {RESULT} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |61.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/tx_proxy/upload_rows_counters.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_counters.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |61.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |61.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console.grpc.pb.cc |61.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/version/version_definition.cpp |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/version/version_definition.cpp |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/json_parser.cpp |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/version/libversion_definition.a |61.3%| [AR] {RESULT} $(B)/ydb/apps/version/libversion_definition.a |61.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/version/libversion_definition.a |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/json_parser.cpp |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |61.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |61.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |61.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp.grpc.pb.cc |61.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |61.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_config/control_plane_config.cpp |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_config/control_plane_config.cpp |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |61.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |61.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |61.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |61.3%| [AR] {RESULT} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/events_writer.cpp |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |61.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |61.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/events_writer.cpp |61.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/common/path_id.cpp |61.3%| [CC] {tool} $(B)/ydb/core/protos/console.grpc.pb.cc |61.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |61.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/common/path_id.cpp |61.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |61.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |61.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |61.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/archive.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/archive.cpp |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/console_config.pb.cc |61.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |61.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |61.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |61.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |61.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/ydb/schema.cpp |61.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/schema.cpp |61.4%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |61.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |61.4%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |61.4%| [CC] {tool} $(B)/ydb/core/protos/console.pb.cc |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/slide_limiter/service/service.cpp |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |61.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |61.4%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |61.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |61.4%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |61.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_monitoring.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/config_parser.cpp |61.5%| [CC] {tool} $(B)/ydb/core/protos/kqp.grpc.pb.cc |61.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_parser.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/service/service.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_monitoring.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |61.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |61.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |61.5%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |61.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage.cpp |61.5%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_checkpoint_storage.cpp |61.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/checkpoint_storage/ydb_checkpoint_storage.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/aggregated_result.cpp |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |61.5%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |61.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/aggregated_result.cpp |61.5%| [CC] {tool} $(B)/ydb/core/protos/grpc.pb.cc |61.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/formats/arrow/serializer/native.cpp |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/audit_config/audit_config_ut.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/native.cpp |61.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |61.6%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_config/audit_config_ut.cpp |61.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |61.6%| [CC] {tool} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json/json_ut.cpp |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/protos/out/out.cpp |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/formats/arrow/program/execution.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json/json_ut.cpp |61.6%| [CC] {tool} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/audit/audit_log_service_ut.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/program/execution.cpp |61.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/out/libcore-protos-out.a |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |61.6%| [AR] {RESULT} $(B)/ydb/core/protos/out/libcore-protos-out.a |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_log_service_ut.cpp |61.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |61.6%| [CC] {tool} $(B)/ydb/core/protos/console_config.grpc.pb.cc |61.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |61.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/protos/out/out.cpp |61.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |61.6%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |61.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |61.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |61.6%| [AR] {RESULT} 
$(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |61.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |61.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |61.7%| [AR] {RESULT} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |61.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |61.7%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |61.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |61.7%| [CC] {tool} $(B)/ydb/core/protos/grpc.grpc.pb.cc |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |61.7%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/interconnect_load.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/interconnect_load.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/test_connection/test_connection.cpp |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |61.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |61.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |61.7%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/config.grpc.pb.cc |61.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |61.7%| [AR] {RESULT} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |61.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_state_storage.cpp |61.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |61.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_state_storage.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |61.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |61.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |61.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |61.8%| [CC] {tool} $(B)/ydb/core/protos/kqp.pb.cc |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/utils.cpp |61.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |61.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |61.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |61.8%| [CC] {tool} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/ut/main.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/ut/main.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/pdisk_log.cpp |61.8%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... 
library/recipes/docker_compose/bin/docker-compose} |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_log.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/core_ydbc.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydbc.cpp |61.8%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/pdisk_write.cpp |61.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |61.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_write.cpp |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |61.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |61.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |61.8%| [CC] {tool} $(B)/ydb/core/protos/msgbus.pb.cc |61.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |61.9%| [CC] {tool} $(B)/ydb/core/protos/console_config.pb.cc |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |61.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |61.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |61.9%| [PD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |61.9%| [PD] {RESULT} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |61.9%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |61.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |61.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |61.9%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/ydb_cli |61.9%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli |61.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |61.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/kqp.pb.cc |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |61.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |61.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/yql_single_query.cpp |61.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |61.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/kqp.pb.cc |61.9%| [CC] {tool} $(B)/library/cpp/build_info/sandbox.cpp |61.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |61.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |61.9%| [CF] {tool} $(B)/library/cpp/build_info/build_info.cpp |61.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/yql_single_query.cpp |61.9%| [CC] {tool} $(S)/library/cpp/svnversion/svn_interface.c |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |61.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/keyvalue_write.cpp |62.0%| [CC] {tool} $(B)/library/cpp/build_info/build_info.cpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/keyvalue_write.cpp |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |62.0%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |62.0%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |62.0%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/init.h_serialized.cpp |62.0%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |62.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |62.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |62.0%| [CC] {tool} $(S)/library/cpp/svnversion/svnversion.cpp |62.0%| [BI] {tool} $(B)/library/cpp/build_info/buildinfo_data.h |62.0%| [CC] {tool} $(S)/library/cpp/build_info/build_info_static.cpp |62.0%| [AR] {tool} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |62.0%| [AR] {tool} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |62.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/libydb-core-protos.a |62.0%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a >> PersQueueCodecs::ToV1Codec [GOOD] |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |62.0%| [CC] {tool} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |62.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/audit/audit_ut.cpp |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest >> PersQueueCodecs::ToV1Codec [GOOD] |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp >> 
PersQueueCodecs::FromV1Codec [GOOD] |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest >> PersQueueCodecs::FromV1Codec [GOOD] |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp >> ValidationTests::MapType [GOOD] |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |62.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen >> TErasureTypeTest::TestBlock23LossOfAllPossible3 >> TErasureTypeTest::TestStripe22LossOfAllPossible2 |62.0%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |62.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |62.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/core_ydb.cpp |62.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |62.1%| [AR] {RESULT} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::MapType [GOOD] |62.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/tsserver/tsserver |62.1%| [LD] {RESULT} $(B)/ydb/tools/tsserver/tsserver |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |62.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |62.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |62.1%| [CC] {tool} $(B)/ydb/core/protos/config.grpc.pb.cc >> PgTest::DumpIntCells |62.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_ut.cpp |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |62.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |62.1%| [LD] {RESULT} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut >> PgTest::DumpIntCells [GOOD] >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |62.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |62.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/core/libydb-mvp-core.a |62.1%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |62.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |62.1%| [AR] {RESULT} $(B)/ydb/mvp/core/libydb-mvp-core.a |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/ut/ydb-core-config-ut |62.1%| [LD] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |62.1%| [LD] {RESULT} $(B)/ydb/core/config/ut/ydb-core-config-ut |62.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |62.1%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |62.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydb.cpp |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |62.1%| [LD] {RESULT} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/audit_config/ut/ydb-core-audit-audit_config-ut |62.1%| [LD] {RESULT} $(B)/ydb/core/audit/audit_config/ut/ydb-core-audit-audit_config-ut |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |62.1%| [LD] {RESULT} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> ValidationTests::CanDispatchByTag [GOOD] |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step |62.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/meta/meta_versions.cpp |62.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step |62.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta_versions.cpp |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanDispatchByTag [GOOD] >> SamplingControlTests::EdgeCaseLower [GOOD] >> SamplingControlTests::EdgeCaseUpper [GOOD] |62.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |62.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] >> ThrottlerControlTests::Simple [GOOD] |62.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/tests/tools/fqrun/src/common.cpp |62.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |62.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |62.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/common.cpp |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] >> SamplingControlTests::Simple [GOOD] >> ThrottlerControlTests::Overflow_1 [GOOD] |62.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] >> ThrottlerControlTests::Overflow_2 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] >> TErasureTypeTest::TestStripe43LossOfAllPossible3 >> TErasureTypeTest::TestBlock33LossOfAllPossible3 >> TErasureTypeTest::TestBlock42PartialRestore3 >> Scheme::NullCell [GOOD] >> Scheme::NotEmptyCell [GOOD] >> TypesProto::DecimalNoTypeInfo [GOOD] >> TypesProto::Decimal35 [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> Scheme::CellVecTryParse [GOOD] >> Scheme::CompareOrder [GOOD] >> Scheme::CompareUuidCells [GOOD] >> Scheme::TSerializedCellMatrix [GOOD] >> Scheme::OwnedCellVecFromSerialized [GOOD] |62.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |62.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut >> TErasureTypeTest::TestMirror3LossOfAllPossible3 >> ThrottlerControlTests::LongIdle [GOOD] |62.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> 
Scheme::NotEmptyCell [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> TypesProto::Decimal35 [GOOD] >> Scheme::EmptyCell [GOOD] >> Scheme::CompareWithNullSemantics [GOOD] >> Scheme::EmptyOwnedCellVec [GOOD] >> Scheme::NonEmptyOwnedCellVec [GOOD] >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |62.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries >> ValidationTests::HasReservedPaths [GOOD] |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/dstool/ydb-dstool |62.2%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |62.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tsserver/tsserver >> ErasureBrandNew::Block42_restore |62.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::CompareUuidCells [GOOD] >> ValidationTests::CanCopyTo [GOOD] >> ValidationTests::AdvancedCopyTo [GOOD] >> TErasureTypeTest::TestBlock22LossOfAllPossible2 |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::CompareWithNullSemantics [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::NonEmptyOwnedCellVec [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::OwnedCellVecFromSerialized [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore1 |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |62.2%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::HasReservedPaths [GOOD] >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |62.3%| [LD] {RESULT} 
$(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore2 |62.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] >> TErasureTypeTest::TestBlock43LossOfAllPossible3 >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe32LossOfAllPossible2 |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> TErasureTypeTest::TestBlock42LossOfAllPossible2 |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |62.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> TErasureTypeTest::TestEo [GOOD] >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] >> TErasureTypeTest::TestStripe42LossOfAllPossible2 |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/ydb-tests-olap |62.3%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 >> TErasureTypeTest::TestBlock32LossOfAllPossible2 |62.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestEo [GOOD] |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp 
|62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlockByteOrder [GOOD] |62.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> PgTest::DumpStringCells [GOOD] |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |62.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe31LossOfAllPossible1 >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] |62.4%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 >> ErasureBrandNew::Block42_encode >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 >> SchemeRanges::CmpBorders [GOOD] >> SchemeBorders::Partial [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/validation/validators_ut.cpp |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestAllSpecies1of2 >> Scheme::TSerializedCellVec [GOOD] >> Scheme::UnsafeAppend [GOOD] |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp >> TErasureTypeTest::TestBlock42PartialRestore0 >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/kqp.cpp |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] >> SchemeBorders::Full [GOOD] |62.4%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Scheme::YqlTypesMustBeDefined [GOOD] |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |62.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::UnsafeAppend [GOOD] Test command err: Serialize: 0.000118s Cells constructor: 0.000356s Parse: 0.000088s Copy: 0.000044s Move: 0.000018s |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/codecs/ut/unittest >> SchemeRanges::RangesBorders [GOOD] >> TypesProto::Decimal22 [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> SchemeBorders::Partial [GOOD] |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/kqp.cpp >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] >> ResourcePoolTest::PercentSettingsParsing [GOOD] >> ResourcePoolTest::SettingsValidation [GOOD] >> ResourcePoolClassifierTest::IntSettingsParsing [GOOD] >> ResourcePoolTest::SecondsSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsExtracting [GOOD] >> ResourcePoolTest::SettingsExtracting [GOOD] >> ResourcePoolTest::IntSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsValidation [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |62.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |62.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> Scheme::YqlTypesMustBeDefined [GOOD] >> AuditConfigTest::Processing [GOOD] >> AuditConfigTest::IncorrectConfig [GOOD] >> AuditConfigTest::DefaultInitialization [GOOD] >> AuditConfigTest::LogPhaseDefault [GOOD] >> AuditConfigTest::LogPhaseInConfig [GOOD] |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut/unittest >> TypesProto::Decimal22 [GOOD] |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete >> TPartitionKeyRangeSequenceTest::ValidSequence [GOOD] >> TPartitionKeyRangeSequenceTest::ValidSinglePartition [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlap [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlapLong [GOOD] >> Json::BasicRendering [GOOD] |62.4%| [LD] {RESULT} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/audit/audit_config/ut/unittest >> AuditConfigTest::LogPhaseInConfig [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidContains [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidGap [GOOD] >> 
TPartitionKeyRangeSequenceTest::InvalidOrder [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverHi [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverLo [GOOD] >> TPartitionKeyRangeSequenceTest::EmptyInput [GOOD] >> TPartitionKeyRangeSequenceTest::ValidFivePartitions [GOOD] |62.4%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |62.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |62.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/resource_pools/ut/unittest >> ResourcePoolClassifierTest::SettingsValidation [GOOD] |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq >> Checks::BasicIntChecks [GOOD] >> Checks::BasicStringChecks [GOOD] >> Checks::IntArrayValidation [GOOD] >> Checks::MapValidation [GOOD] >> Checks::ErrorInCheck [GOOD] >> Checks::OpaqueMaps [GOOD] |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |62.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |62.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/public/partition_key_range/ut/unittest >> TPartitionKeyRangeSequenceTest::ValidFivePartitions [GOOD] |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> Checks::OpaqueMaps [GOOD] |62.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |62.5%| [TS] {RESULT} ydb/core/audit/audit_config/ut/unittest |62.5%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |62.5%| [TS] {RESULT} ydb/core/resource_pools/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |62.5%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |62.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.5%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.5%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |62.5%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table >> comparator::import_test [GOOD] |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |62.5%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_checks/unittest |62.5%| [CC] {tool} $(B)/ydb/core/protos/tx_datashard.pb.cc |62.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |62.5%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/oltp_workload |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |62.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning >> result_convert::import_test [GOOD] >> runner::import_test [GOOD] >> TErasurePerfTest::Split |62.5%| [LD] {RESULT} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |62.5%| [TS] {RESULT} ydb/core/config/ut/unittest >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::StoredWithRules [GOOD] |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/configs/comparator/import_test >> comparator::import_test [GOOD] |62.5%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.5%| [TS] {RESULT} ydb/core/persqueue/public/partition_key_range/ut/unittest |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |62.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp >> TErasurePerfTest::Split [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> downloader::import_test [GOOD] >> TErasurePerfTest::Restore >> run_tests::import_test [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp >> YdbVersion::CompatibleWithSelf [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::WithPatchAndWithoutPatch [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/binaries/downloader/import_test >> downloader::import_test [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/mvp.cpp >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> YdbVersion::LimitOld [GOOD] >> OldFormat::SameVersion [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |62.6%| [TS] {RESULT} ydb/tests/library/compatibility/configs/comparator/import_test >> ydb-dstool::import_test [GOOD] >> TFunctionsMetadataTest::Serialization [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp >> YdbVersion::NewNbsCurrent [GOOD] >> ErasureBrandNew::Block42_encode [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> ErasureBrandNew::Block42_chunked >> YdbVersion::ExtraAndForbidden [GOOD] >> YdbVersion::AcceptSpecificHotfixWithoutPatch [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> OldFormat::OldNbs [GOOD] >> YdbVersion::YDBAndNbs [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/import_test >> ydb-dstool::import_test [GOOD] |62.6%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test >> YdbVersion::PrintCurrentVersionProto [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] >> OldFormat::TooOld [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> OldFormat::DefaultRules [GOOD] >> YdbVersion::Component [GOOD] >> OldFormat::PrevYear [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> 
YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::DefaulPatchTag [GOOD] >> VersionParser::Basic [GOOD] >> OldFormat::Trunk [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |62.6%| [TA] $(B)/ydb/core/persqueue/public/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::DefaultPrevYear [GOOD] Test command err: Application: "ydb" >> result_compare::import_test [GOOD] |62.6%| [TA] $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |62.6%| [TA] $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |62.6%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/import_test |62.6%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] >> ValidatorBuilder::CreateMultitypeNode [GOOD] >> ValidatorBuilder::BuildSimpleValidator [GOOD] >> ValidatorBuilder::CanHaveDuplicateType [GOOD] >> ValidatorBuilder::CanHaveMultipleType [GOOD] >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_config.cpp |62.6%| [TS] {RESULT} ydb/apps/dstool/import_test |62.6%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |62.6%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config.cpp |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |62.6%| [TA] {RESULT} $(B)/ydb/core/persqueue/public/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |62.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__migrate_state.cpp >> simple_json_diff::import_test [GOOD] >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |62.6%| [TA] {RESULT} $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |62.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |62.6%| [TA] {RESULT} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test |62.6%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |62.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/import_test >> simple_json_diff::import_test [GOOD] |62.7%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ErasureBrandNew::Block42_chunked [GOOD] |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp >> ydb-tests-functional-tpcc::import_test [GOOD] |62.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/init.h_serialized.cpp |62.7%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/import_test |62.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_chunked [GOOD] Test command err: totalSize# 498266790 period1# 2.526553s period2# 0.347521s MB/s1# 197.212087 MB/s2# 1433.774621 factor# 7.270216764 |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpcc/import_test >> ydb-tests-functional-tpcc::import_test [GOOD] |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |62.7%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |62.7%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |62.7%| [TS] {RESULT} ydb/tests/functional/tpcc/import_test |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |62.7%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore >> ydb-tests-olap::import_test [GOOD] |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |62.7%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |62.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |62.7%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering >> ydb-tests-functional-api::import_test [GOOD] |62.7%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/meta/meta.cpp |62.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |62.7%| [LD] {RESULT} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__load_state.cpp |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__load_state.cpp |62.7%| [TS] {RESULT} ydb/tests/olap/import_test |62.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |62.7%| [TS] {RESULT} ydb/tests/functional/api/import_test |62.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp >> ydb-tests-functional-sqs-common::import_test [GOOD] |62.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |62.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |62.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a >> ydb_recipe::import_test [GOOD] |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta.cpp |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] >> ydb-tests-olap-load::import_test [GOOD] |62.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD] >> ydb-tests-olap-data_quotas::import_test [GOOD] >> ydb-tests-functional-blobstorage::import_test [GOOD] |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/proxy.cpp |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] >> ydb-tests-olap-delete::import_test [GOOD] >> ydb-tests-functional-clickbench::import_test [GOOD] >> ydb-tests-functional-restarts::import_test [GOOD] |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] |62.8%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |62.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |62.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas >> ydb-tests-functional-audit::import_test [GOOD] |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/delete/import_test >> ydb-tests-olap-delete::import_test [GOOD] |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> 
ydb-tests-functional-clickbench::import_test [GOOD] |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] |62.8%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test |62.8%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test >> TErasurePerfTest::Restore [GOOD] >> TErasureSmallBlobSizePerfTest::StringErasureMode [GOOD] >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |62.8%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |62.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |62.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |62.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/import_test >> ydb-tests-olap-load::import_test [GOOD] |62.8%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml >> ydb-tests-functional-security::import_test [GOOD] |62.8%| [TS] {RESULT} ydb/tests/functional/restarts/import_test |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |62.8%| [TS] {RESULT} ydb/tests/olap/delete/import_test |62.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |62.8%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut_perf/unittest >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |62.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/security/import_test >> ydb-tests-functional-security::import_test [GOOD] |62.8%| [TS] {RESULT} ydb/tests/functional/audit/import_test |62.8%| [TS] {RESULT} ydb/tests/olap/load/import_test >> ydb-tests-functional-script_execution::import_test [GOOD] |62.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |62.8%| [TS] {RESULT} ydb/tests/functional/security/import_test |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |62.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-script_execution::import_test [GOOD] |62.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename >> ydb-tests-functional-scheme_shard::import_test [GOOD] |62.8%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |62.9%| [TS] {RESULT} ydb/core/erasure/ut_perf/unittest |62.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |62.9%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-tenants::import_test [GOOD] >> ydb-tests-functional-canonical::import_test [GOOD] >> ydb-tests-functional-cms::import_test 
[GOOD] |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |62.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] |62.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |62.9%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |62.9%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |62.9%| [LD] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init >> ydb-tests-functional-limits::import_test [GOOD] |62.9%| [TS] {RESULT} ydb/tests/functional/tenants/import_test |62.9%| [TS] {RESULT} ydb/tests/functional/cms/import_test |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |62.9%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |62.9%| [TS] {RESULT} ydb/tests/functional/limits/import_test |62.9%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut >> ydb-tests-functional-tpc-medium::import_test [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> ydb-tests-functional-suite_tests::import_test [GOOD] |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |62.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut >> test_init.py::TestTpcdsInit::test_s100_column >> test_init.py::TestTpchInit::test_s1_s3 >> test_generator.py::TestTpchGenerator::test_s1 >> test_init.py::TestTpchInit::test_s1_row >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> ydb-tests-functional-serverless::import_test [GOOD] |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] >> 
ydb-tests-functional-hive::import_test [GOOD] |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test |62.9%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test |62.9%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |63.0%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s100_column [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> ydb-tests-olap-column_family-compression::import_test [GOOD] |63.0%| [TS] {RESULT} ydb/tests/functional/serverless/import_test |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |63.0%| [TS] {RESULT} ydb/tests/functional/hive/import_test |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD] >> ydb-tests-datashard-s3::import_test [GOOD] |63.0%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |63.0%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |63.0%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |63.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/s3/import_test >> ydb-tests-datashard-s3::import_test [GOOD] |63.0%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] |63.0%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |63.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/libcore-config-init.a |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |63.0%| [AR] {RESULT} $(B)/ydb/core/config/init/libcore-config-init.a |63.0%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |63.0%| [TS] {RESULT} ydb/tests/datashard/s3/import_test |63.0%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/import_test >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] >> 
ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |63.0%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading >> ydb-tests-fq-multi_plane::import_test [GOOD] >> test_init.py::TestTpchInit::test_s1_column |63.0%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/import_test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] >> test_init.py::TestTpchInit::test_s100_column |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp >> test_init.py::TestTpchInit::test_s100_column [GOOD] |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud >> test_init.py::TestClickbenchInit::test_s1_column >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_row |63.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |63.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1 >> test_generator.py::TestTpchGenerator::test_s1_state |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |63.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/medium/import_test >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |63.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |63.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |63.1%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test |63.1%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |63.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli >> ydb-tests-functional-rename::import_test [GOOD] >> ydb-tests-datashard-copy_table::import_test [GOOD] >> ydb-tests-datashard-split_merge::import_test [GOOD] >> ydb-tests-datashard-ttl::import_test [GOOD] |63.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test |63.1%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/import_test >> oltp_workload::import_test [GOOD] |63.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/split_merge/import_test >> ydb-tests-datashard-split_merge::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/copy_table/import_test >> ydb-tests-datashard-copy_table::import_test [GOOD] |63.1%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s100_column [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/import_test >> ydb-tests-datashard-ttl::import_test [GOOD] |63.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp >> ydb-tests-functional-sqs-large::import_test [GOOD] |63.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/import_test >> oltp_workload::import_test [GOOD] >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] >> ydb-tests-functional-sqs-messaging::import_test [GOOD] >> ydb-tests-datashard-partitioning::import_test [GOOD] >> ydb-tests-datashard-secondary_index::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/partitioning/import_test >> ydb-tests-datashard-partitioning::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/import_test >> ydb-tests-datashard-secondary_index::import_test [GOOD] |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |63.1%| [TS] {RESULT} ydb/tests/datashard/split_merge/import_test |63.1%| [TS] {RESULT} ydb/tests/datashard/copy_table/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD] |63.1%| [TS] {RESULT} ydb/tests/datashard/ttl/import_test |63.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |63.1%| [TS] {RESULT} ydb/tests/stress/oltp_workload/import_test |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/config/ydb-tests-functional-config >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |63.1%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD] |63.1%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |63.1%| [TS] {RESULT} ydb/tests/datashard/partitioning/import_test |63.1%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |63.1%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |63.2%| [TS] {RESULT} ydb/tests/datashard/secondary_index/import_test |63.2%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |63.2%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts >> test_init.py::TestTpcdsInit::test_s1_column >> 
test_generator.py::TestTpcdsGenerator::test_s1_parts >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |63.2%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/sql/ydb-tests-sql |63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal |63.2%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |63.2%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/import_test |63.2%| [TS] {RESULT} ydb/tests/functional/rename/import_test >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> ydb-tests-datashard-async_replication::import_test [GOOD] |63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |63.2%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |63.2%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |63.2%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/tstool/tstool |63.2%| [LD] {RESULT} $(B)/ydb/tools/tstool/tstool >> test_init.py::TestTpchInit::test_s1_column_decimal |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/async_replication/import_test >> ydb-tests-datashard-async_replication::import_test [GOOD] |63.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |63.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |63.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test |63.2%| [TS] {RESULT} ydb/tests/datashard/async_replication/import_test |63.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] |63.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |63.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |63.2%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests >> test_init.py::TestTpcdsInit::test_s1_row >> ydb-tests-datashard-dump_restore::import_test [GOOD] |63.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> 
ydb-tests-datashard-dump_restore::import_test [GOOD] >> ydb-tests-fq-yds::import_test [GOOD] >> ydb-tests-functional-statistics::import_test [GOOD] |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/statistics/import_test >> ydb-tests-functional-statistics::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] >> ydb-tests-datashard-select::import_test [GOOD] |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> ydb-tests-functional-autoconfig::import_test [GOOD] |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/import_test >> ydb-tests-datashard-select::import_test [GOOD] |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/cfg/bin/ydb_configure >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |63.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] >> ydb-tests-olap-oom::import_test [GOOD] |63.3%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/cfg |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |63.3%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-solomon-reading::import_test [GOOD] |63.3%| [TS] {RESULT} ydb/tests/fq/yds/import_test |63.3%| [LD] {RESULT} $(B)/ydb/tools/cfg/bin/ydb_configure |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/oom/import_test >> ydb-tests-olap-oom::import_test [GOOD] |63.3%| [TS] {RESULT} ydb/tests/functional/statistics/import_test |63.3%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |63.3%| [CC] {tool} $(B)/ydb/core/protos/config.pb.cc |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/solomon/reading/import_test >> ydb-tests-solomon-reading::import_test [GOOD] >> ydb-tests-functional-ttl::import_test [GOOD] |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |63.3%| [TS] {RESULT} ydb/tests/datashard/select/import_test |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests >> tstool::import_test [GOOD] >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |63.3%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test |63.3%| [TS] {RESULT} ydb/tests/olap/oom/import_test |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/import_test >> tstool::import_test 
[GOOD] |63.3%| [TS] {RESULT} ydb/tests/solomon/reading/import_test |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests >> test_init.py::TestTpcdsInit::test_s1_s3 |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |63.3%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |63.3%| [TS] {RESULT} ydb/tools/tstool/import_test |63.3%| [TS] {RESULT} ydb/tests/functional/ttl/import_test |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests >> ydb-tests-functional-ydb_cli::import_test [GOOD] |63.3%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |63.4%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut >> ydb-tests-datashard-dml::import_test [GOOD] |63.4%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |63.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dml/import_test >> ydb-tests-datashard-dml::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/example/ydb-tests-example |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests >> ydb-tests-functional-tpc-medium-tpch::import_test [GOOD] |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] |63.4%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |63.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/audit/audit_config/ut/ydb-core-audit-audit_config-ut |63.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp >> kqprun_recipe::import_test [GOOD] |63.4%| [TS] {RESULT} 
ydb/tests/datashard/dml/import_test |63.4%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |63.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |63.4%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] >> ydb-tests-functional-config::import_test [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/tpch/import_test >> ydb-tests-functional-tpc-medium-tpch::import_test [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/tests/import_test >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] >> ydb-tests-stress-kafka-tests::import_test [GOOD] >> ydb-tests-library-ut::import_test [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/endf/001534/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/endf/001534/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kafka/tests/import_test >> ydb-tests-stress-kafka-tests::import_test [GOOD] >> ydb-tests-fq-s3::import_test [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |63.4%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/statistics_workload/statistics_workload >> ydb-tests-stress-node_broker-tests::import_test [GOOD] >> pile_promotion_workload::import_test [GOOD] >> ydb-tests-functional-scheme_tests::import_test [GOOD] |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |63.4%| [TS] {RESULT} ydb/tests/functional/tpc/medium/tpch/import_test |63.4%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/statistics_workload >> ydb_serializable::import_test [GOOD] |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/node_broker/tests/import_test >> 
ydb-tests-stress-node_broker-tests::import_test [GOOD] |63.4%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/import_test |63.4%| [TS] {RESULT} ydb/tests/stress/kafka/tests/import_test |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/scheme_board/pile_promotion/import_test >> pile_promotion_workload::import_test [GOOD] |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |63.4%| [TS] {RESULT} ydb/tests/library/ut/import_test |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |63.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/config/init/init_ut.cpp |63.4%| [LD] {RESULT} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |63.4%| [LD] {RESULT} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |63.4%| [LD] {RESULT} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |63.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] |63.4%| [TS] {RESULT} ydb/tests/fq/s3/import_test |63.4%| [LD] {RESULT} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |63.4%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/import_test |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |63.4%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |63.4%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |63.5%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/import_test |63.5%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |63.5%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |63.5%| [TS] {RESULT} ydb/tests/functional/config/import_test |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |63.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |63.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |63.5%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] >> ydb-tests-stress-ctas-tests::import_test [GOOD] |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |63.5%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> ydb-tests-stress-kv-tests::import_test [GOOD] |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |63.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/ctas/tests/import_test >> ydb-tests-stress-ctas-tests::import_test [GOOD] >> ydb-tests-stress-topic_kafka-tests::import_test [GOOD] >> 
ydb-tests-stress-olap_workload-tests::import_test [GOOD] >> ydb-core-viewer-tests::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/topic_kafka/tests/import_test >> ydb-tests-stress-topic_kafka-tests::import_test [GOOD] |63.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD] >> ydb_configure::import_test [GOOD] >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/import_test >> ydb_configure::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/import_test >> ydb-tests-functional-tpc-large::import_test [GOOD] >> ydb-tests-olap-scenario::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] >> ydb-tests-example::import_test [GOOD] >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/s3_backups/s3_backups >> statistics_workload::import_test [GOOD] |63.5%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/s3_backups_workload |63.5%| [TS] {RESULT} ydb/tests/stress/ctas/tests/import_test |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |63.5%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/nemesis/driver/nemesis |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |63.5%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |63.5%| [LD] {RESULT} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |63.5%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |63.5%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/driver/nemesis |63.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |63.5%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test |63.5%| [TS] {RESULT} ydb/tests/stress/topic_kafka/tests/import_test |63.5%| [LD] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |63.5%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/nemesis |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |63.6%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |63.6%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |63.6%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test |63.6%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test |63.6%| [TS] {RESULT} ydb/tests/stress/statistics_workload/import_test |63.6%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |63.6%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests >> ydb-tests-compatibility-olap::import_test [GOOD] |63.6%| [TS] {RESULT} ydb/tests/functional/tpc/large/import_test |63.6%| [TS] {RESULT} ydb/tests/olap/scenario/import_test >> ydb-tests-fq-streaming::import_test [GOOD] |63.6%| [TS] {RESULT} ydb/tests/example/import_test |63.6%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] >> ydb-tests-sql::import_test [GOOD] >> stress-reconfig_state_storage_workload-tests::import_test [GOOD] |63.6%| [TS] {RESULT} ydb/core/viewer/tests/import_test |63.6%| [TS] {RESULT} ydb/tools/cfg/bin/import_test |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/compatibility/olap/import_test >> ydb-tests-compatibility-olap::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming/import_test >> ydb-tests-fq-streaming::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |63.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/reconfig_state_storage_workload/tests/import_test >> stress-reconfig_state_storage_workload-tests::import_test [GOOD] |63.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/apps/dstool/ydb-dstool >> ydb-tests-fq-restarts::import_test [GOOD] >> ydb-tests-fq-plans::import_test [GOOD] |63.6%| [TS] {RESULT} ydb/tests/compatibility/olap/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] >> ydb-tests-olap-s3_import::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |63.6%| [TS] {RESULT} ydb/tests/fq/streaming/import_test |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |63.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD] >> ydb-tests-tools-pq_read-test::import_test [GOOD] |63.7%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |63.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |63.7%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test |63.7%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |63.7%| [TS] {RESULT} ydb/tests/stress/reconfig_state_storage_workload/tests/import_test |63.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |63.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_ut.cpp |63.7%| [TS] {RESULT} ydb/tests/fq/restarts/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |63.7%| [TS] {RESULT} ydb/tests/fq/plans/import_test >> ErasureBrandNew::Block42_restore [GOOD] >> ErasureBrandNew::Block42_restore_benchmark |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |63.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |63.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> s3_backups::import_test [GOOD] |63.7%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/s3_backups/import_test >> s3_backups::import_test [GOOD] |63.7%| [TS] {RESULT} ydb/tests/sql/import_test |63.7%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] |63.7%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/tools/local_ydb/local_ydb |63.7%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb |63.7%| [TS] {RESULT} ydb/tests/stress/s3_backups/import_test |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/s3_backups/tests/import_test >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable >> ydb-tests-fq-common::import_test [GOOD] |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests >> ydb-tests-functional-minidumps::import_test [GOOD] |63.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] >> ydb-tests-fq-mem_alloc::import_test [GOOD] |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |63.7%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/import_test |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> 
ydb-tests-functional-minidumps::import_test [GOOD] >> ydb-tests-functional-encryption::import_test [GOOD] |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/olap_workload/olap_workload |63.7%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |63.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |63.7%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |63.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |63.8%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload |63.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] >> ydb-tests-fq-http_api::import_test [GOOD] >> ydb-tests-stress-transfer-tests::import_test [GOOD] |63.8%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/olap_workload |63.8%| [TS] {RESULT} ydb/tests/fq/common/import_test |63.8%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test |63.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |63.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/import_test >> ydb-tests-stress-transfer-tests::import_test [GOOD] |63.8%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |63.8%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |63.8%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |63.8%| [TS] {RESULT} ydb/tests/stress/transfer/tests/import_test >> nemesis::import_test [GOOD] >> ErasureBrandNew::Block42_restore_benchmark [GOOD] |63.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/import_test >> nemesis::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_restore_benchmark [GOOD] Test command err: totalSize# 495179526 period1# 1.163306s period2# 0.629323s MB/s1# 425.6657543 MB/s2# 786.8447935 factor# 1.848503868 |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |63.8%| [LD] {RESULT} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |63.8%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/import_test >> ydbd_slice::import_test [GOOD] |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |63.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] |63.8%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |63.8%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |63.9%| [LD] {RESULT} 
$(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |63.9%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> ydb-tests-stress-topic-tests::import_test [GOOD] >> local_ydb::import_test [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/topic/tests/import_test >> ydb-tests-stress-topic-tests::import_test [GOOD] |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/import_test >> local_ydb::import_test [GOOD] |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |63.9%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |63.9%| [LD] {RESULT} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |63.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |63.9%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |63.9%| [TS] {RESULT} ydb/tests/stress/topic/tests/import_test >> ydb-tests-stress-show_create-view-tests::import_test [GOOD] >> ydb-tests-functional-serializable::import_test [GOOD] |63.9%| [TS] {RESULT} ydb/public/tools/local_ydb/import_test |63.9%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/show_create/view/tests/import_test >> ydb-tests-stress-show_create-view-tests::import_test [GOOD] |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] >> ydb-tests-datashard-add_column::import_test [GOOD] |63.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |63.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |63.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |63.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/add_column/import_test >> ydb-tests-datashard-add_column::import_test [GOOD] |63.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |64.0%| [TS] {RESULT} ydb/tests/stress/show_create/view/tests/import_test |64.0%| [TS] {RESULT} ydb/tests/functional/serializable/import_test |64.0%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test |64.0%| [LD] {RESULT} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |64.0%| [TS] {RESULT} ydb/tests/datashard/add_column/import_test |64.0%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> 
ydb-tests-stability-ydb::import_test [GOOD] |64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |64.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |64.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |64.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] >> replay::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/tests/stability/ydb/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] >> ydb-tests-functional-bridge::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test >> ydb-tests-stress-cdc-tests::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/tests/functional/wardens/import_test |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/bridge/import_test >> ydb-tests-functional-bridge::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/cdc/tests/import_test >> ydb-tests-stress-cdc-tests::import_test [GOOD] |64.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |64.0%| [AR] {RESULT} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |64.0%| [TS] {RESULT} ydb/tests/functional/bridge/import_test |64.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/libydb-mvp-meta.a >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |64.0%| [AR] {RESULT} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |64.0%| [TS] {RESULT} ydb/tests/stress/cdc/tests/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test >> olap_workload::import_test [GOOD] >> ydb-tests-datashard-vector_index-large::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/import_test >> olap_workload::import_test [GOOD] |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/large/import_test >> ydb-tests-datashard-vector_index-large::import_test [GOOD] >> ydb-tests-compatibility-s3_backups::import_test [GOOD] |64.0%| [TS] {RESULT} ydb/tests/stress/olap_workload/import_test |64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |64.1%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |64.1%| [TS] {RESULT} ydb/tests/datashard/vector_index/large/import_test |64.1%| [TS] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/compatibility/s3_backups/import_test >> ydb-tests-compatibility-s3_backups::import_test [GOOD] |64.1%| [TS] {RESULT} ydb/tests/compatibility/s3_backups/import_test |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe >> ydb-tests-functional-query_cache::import_test [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] |64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |64.1%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] |64.1%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/tpch/tpch |64.1%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |64.1%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/bin/mvp_meta |64.1%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms >> ydb-tests-compatibility::import_test [GOOD] |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/compatibility/import_test >> ydb-tests-compatibility::import_test [GOOD] |64.1%| [TS] {RESULT} ydb/tests/compatibility/import_test |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index >> ydb-tests-sql-large::import_test [GOOD] |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] |64.1%| [TS] {RESULT} ydb/tests/sql/large/import_test |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |64.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |64.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |64.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |64.1%| [BN] {BAZEL_UPLOAD} 
$(B)/ydb/tests/stability/tool/oltp_workload |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tstool/tstool |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |64.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests >> test_generator.py::TestTpchGenerator::test_s1_parts 
[GOOD] |64.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |64.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] |64.2%| [PY] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |64.3%| [PY] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |64.3%| [AR] {tool} $(B)/ydb/core/protos/libydb-core-protos.a |64.3%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/sql/ydb-tests-sql |64.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |64.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |64.3%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |64.3%| [LD] {tool} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |64.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |64.3%| [LD] {tool} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |64.3%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |64.3%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.h |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |64.3%| [LD] {tool} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |64.3%| [LD] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |64.3%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |64.3%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |64.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/tool |64.3%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |64.3%| [LD] {tool} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |64.3%| [LD] {RESULT} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |64.3%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/generated/control_board_proto.h |64.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |64.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |64.3%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] |64.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |64.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |64.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/libcore-base-generated.a |64.4%| [AR] {RESULT} $(B)/ydb/core/base/generated/libcore-base-generated.a |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |64.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |64.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |64.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |64.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |64.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] |64.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |64.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp >> tool::import_test [GOOD] |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |64.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon_alloc/stats.cpp |64.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/import_test >> tool::import_test [GOOD] |64.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/stats.cpp |64.4%| [TS] {RESULT} ydb/tests/stability/tool/import_test |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] |64.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/topic_description.cpp |64.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/topic_description.cpp |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/tools/local_ydb/local_ydb |64.4%| 
[LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |64.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |64.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |64.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/cfg |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/olap_workload/olap_workload |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/s3_backups/s3_backups |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |64.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/statistics_workload |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/example/ydb-tests-example |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon_alloc/monitor.cpp |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/monitor.cpp |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |64.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/s3_backups_workload |64.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/nemesis |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |64.5%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |64.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/olap_workload |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |64.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/bin/mvp_meta |64.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_table.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_table.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/service/ext_counters.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/ext_counters.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/service.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/service/sysview_service.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/sysview_service.cpp |64.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |64.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |64.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |64.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/coordinator/coordinator_state.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |64.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |64.6%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |64.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/fake_coordinator.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/fake_coordinator.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |64.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |64.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |64.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |64.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |64.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/count_queues.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/count_queues.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |64.7%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_transform.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_transform.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |64.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |64.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |64.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |64.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |64.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |64.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |64.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |64.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |64.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |64.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_runner.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_runner.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |64.9%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |64.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |64.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |65.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__write.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |65.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |65.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |65.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/init/init.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/init/init.cpp |65.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |65.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |65.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |65.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |65.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a 
|65.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |65.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp >> TErasureTypeTest::TestAllSpecies1of2 [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |65.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |65.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |65.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_translate.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_translate.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |65.1%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_modulo.cpp >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |65.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |65.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_api_handler.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_api_handler.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |65.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |65.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |65.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |65.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |65.2%| [AR] {RESULT} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |65.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/logger.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/logger.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |65.2%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/cms/erasure_checkers.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/erasure_checkers.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |65.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |65.2%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |65.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/downtime.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |65.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |65.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |65.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |65.3%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |65.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cluster_info.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |65.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |65.3%| [AR] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |65.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |65.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/table_creator/table_creator.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |65.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |65.3%| [AR] {RESULT} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator.cpp |65.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |65.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/sharding.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/sharding.cpp |65.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/signals/owner.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/signals/owner.cpp |65.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/signals/libydb-library-signals.a |65.4%| [AR] {RESULT} $(B)/ydb/library/signals/libydb-library-signals.a |65.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/api_adapters.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/api_adapters.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |65.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |65.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |65.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |65.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |65.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |65.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/info_collector.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/info_collector.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/http.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/http.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |65.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |65.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_load_state.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_load_state.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |65.5%| [AR] {RESULT} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |65.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |65.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |65.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/health/health.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/health/health.cpp |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |65.5%| [AR] {RESULT} 
$(B)/ydb/core/fq/libs/health/libfq-libs-health.a |65.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/scan.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/scan.cpp |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |65.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |65.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/query_actor/query_actor.cpp |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor.cpp |65.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |65.5%| [AR] {RESULT} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |65.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |65.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |65.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tx_update_config.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_config.cpp |65.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/libydb-core-cms.a |65.6%| [AR] {RESULT} $(B)/ydb/core/cms/libydb-core-cms.a |65.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |65.6%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |65.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |65.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |65.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |65.6%| [AR] {RESULT} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |65.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_common.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |65.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |65.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |65.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |65.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |65.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__register_node.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__register_node.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp 
|65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/mon.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon.cpp |65.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/libydb-core-mon.a |65.7%| [AR] {RESULT} $(B)/ydb/core/mon/libydb-core-mon.a |65.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |65.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |65.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |65.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |65.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |65.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |65.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |65.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |65.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_balancer.cpp |65.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/hive/storage_balancer.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |65.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |65.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |65.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__set_down.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__set_down.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/manager.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/manager.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |65.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |65.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |65.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/time_cast/time_cast.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast.cpp |65.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |65.9%| [AR] {RESULT} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |65.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__init.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_group_info.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_group_info.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |65.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |65.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |65.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/request/request_actor.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |65.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |65.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_domains.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_domains.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/node_info.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/node_info.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |66.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |66.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |66.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |66.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |66.0%| [AR] {RESULT} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |66.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/hash_slider.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_slider.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |66.0%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |66.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |66.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |66.0%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |66.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |66.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |66.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |66.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |66.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/fill.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/fill.cpp |66.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |66.1%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |66.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_statics.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_statics.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |66.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |66.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |66.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |66.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |66.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |66.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/random.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/random.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |66.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |66.2%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |66.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/fetcher.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/fetcher.cpp |66.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |66.2%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__status.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__status.cpp |66.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_log.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_log.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |66.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |66.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |66.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |66.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/drain.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/drain.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |66.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |66.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/domain_info.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/domain_info.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/monitoring.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/monitoring.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/hive/boot_queue.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/boot_queue.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/worker.cpp |66.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |66.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker.cpp |66.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tablet_info.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_info.cpp |66.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |66.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |66.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |66.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |66.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |66.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |66.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |66.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |66.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_startup.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_startup.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |66.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |66.4%| [AR] {RESULT} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |66.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |66.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |66.4%| [AR] {RESULT} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |66.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/executor.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/executor.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_impl.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |66.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_host.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_host.cpp |66.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |66.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |66.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |66.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |66.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |66.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |66.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |66.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/balancer.cpp |66.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/balancer.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |66.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |66.5%| [AR] {RESULT} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |66.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/shard_impl.cpp |66.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |66.6%| [AR] {RESULT} 
$(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/shard_impl.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |66.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |66.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |66.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |66.6%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |66.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |66.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |66.6%| [AR] {RESULT} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |66.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/cache.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |66.6%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |66.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |66.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |66.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |66.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |66.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/load_test.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/load_test.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/proxy.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |66.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |66.7%| [AR] {RESULT} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |66.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp 
|66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |66.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |66.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |66.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |66.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |66.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |66.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |66.8%| [AR] {RESULT} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |66.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |66.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |66.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |66.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_init.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_init.cpp 
|66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/space_monitor.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/space_monitor.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |66.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |66.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |66.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |66.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |66.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |66.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |66.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |66.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |66.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |66.9%| [AR] {RESULT} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_apply_config.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_apply_config.cpp |66.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/testing.cpp |66.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/testing.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |66.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |67.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |67.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |67.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |67.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |67.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |67.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |67.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_load.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_load.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/s3_upload.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_upload.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |67.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |67.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |67.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/blocks.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/coro_tx.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/coro_tx.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blocks.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |67.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |67.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |67.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/s3.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_trash.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_trash.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_load.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/populator.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_load.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |67.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |67.1%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |67.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/backup_unit.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_decommit.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_decommit.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |67.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_backup.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |67.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |67.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/replication/service/base_table_writer.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/backup.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/backup.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/audit/audit.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit.cpp |67.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |67.2%| [AR] {RESULT} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |67.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/op_init_schema.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_init_schema.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |67.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |67.2%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |67.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_view.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |67.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |67.2%| [AR] {RESULT} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |67.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_resolve.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |67.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_resolve.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |67.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |67.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/s3_delete.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_delete.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/s3_write.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_write.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/assimilator.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/assimilator.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/s3_scan.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_scan.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/blob_depot.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blob_depot.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |67.3%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_mon.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_mon.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_import.cpp |67.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |67.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_gc.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_gc.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |67.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/service.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/service.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |67.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |67.4%| [AR] {RESULT} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |67.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |67.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/table_settings.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |67.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |67.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_replication.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon_alloc/profiler.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/profiler.cpp |67.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |67.6%| [AR] {RESULT} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |67.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |67.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/replication/grpc_service.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |67.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/replication/grpc_service.cpp |67.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/replication/libydb-services-replication.a |67.6%| [AR] {RESULT} $(B)/ydb/services/replication/libydb-services-replication.a |67.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |67.7%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_login.cpp |67.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |67.7%| [AR] {RESULT} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |67.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |67.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |67.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_config.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_config.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_ping.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/untag_queue.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/untag_queue.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |67.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |67.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_cms.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |67.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/operation_helpers.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |67.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |67.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |68.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/datashard_split_src.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/retention.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/retention.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |68.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |68.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/block_events.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/block_events.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/mon_main.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/mon_main.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |68.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/data_uncertain.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_uncertain.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/garbage_collection.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/garbage_collection.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tablet_helpers.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_helpers.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_helper.cpp |68.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |68.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ymq/ymq_proxy.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/ymq_proxy.cpp |68.2%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_fq.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |68.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |68.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |68.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |68.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |68.2%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |68.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |68.2%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |68.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |68.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |68.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |68.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |68.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/maintenance/grpc_service.cpp |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |68.3%| [AR] {RESULT} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/maintenance/grpc_service.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |68.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |68.3%| [AR] {RESULT} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |68.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |68.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_export.cpp |68.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/given_id_range.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range.cpp |68.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tracing/tablet_info.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tracing/tablet_info.cpp |68.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tracing/libydb-core-tracing.a |68.4%| [AR] {RESULT} $(B)/ydb/core/tracing/libydb-core-tracing.a |68.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/wait_events.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/wait_events.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |68.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |68.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |68.4%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/service.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/service.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |68.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |68.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |68.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |68.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |68.5%| [AR] {RESULT} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |68.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |68.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |68.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |68.5%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/test_runtime.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime.cpp |68.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |68.5%| [AR] {RESULT} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |68.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/tag_queue.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/tag_queue.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |68.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |68.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp 
|68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/rate_limiter/grpc_service.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/grpc_service.cpp |68.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |68.6%| [AR] {RESULT} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |68.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/grpc_server.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_server.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/health_check/health_check.cpp |68.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/libydb-core-health_check.a |68.6%| [AR] {RESULT} $(B)/ydb/core/health_check/libydb-core-health_check.a |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_profiles.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_profiles.cpp |68.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/send_message.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/send_message.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |68.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/counters/counters.cpp |68.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/counters/counters.cpp |68.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |68.6%| [AR] {RESULT} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |68.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |68.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/logger.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/logger.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_http_server.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_http_server.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_types.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_types.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |68.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_console.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_console.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/proxy_service.cpp |68.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/libcore-client-server.a |68.7%| [AR] {RESULT} $(B)/ydb/core/client/server/libcore-client-server.a |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_service.cpp |68.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/tablet/ydb_tablet.cpp |68.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/tablet/ydb_tablet.cpp |68.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/tablet/libydb-services-tablet.a |68.8%| [AR] {RESULT} $(B)/ydb/services/tablet/libydb-services-tablet.a |68.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |68.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/service.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/service.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__load_state.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__load_state.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |68.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |68.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |68.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |68.8%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_handshake.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/immediate_control_board_actor.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_handshake.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor.cpp |68.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/libydb-core-control.a |68.8%| [AR] {RESULT} $(B)/ydb/core/control/libydb-core-control.a |68.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/libydb-core-control.a |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |68.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |68.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_cache.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/config_helpers.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/config_helpers.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |68.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/console__remove_computational_units.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |68.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |68.9%| [AR] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |68.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__set_config.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__set_config.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/registration.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/registration.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_provider.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_provider.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_configs_manager.cpp |68.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_manager.cpp |68.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__create_tenant.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__create_tenant.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queue_leader.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_leader.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/cms/console/console__init_scheme.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__init_scheme.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/discovery/discovery.cpp |69.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/discovery/discovery.cpp |69.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/discovery/libydb-core-discovery.a |69.0%| [AR] {RESULT} 
$(B)/ydb/core/discovery/libydb-core-discovery.a |69.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |69.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/libcore-cms-console.a |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |69.1%| [AR] {RESULT} $(B)/ydb/core/cms/console/libcore-cms-console.a |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |69.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/monitoring/grpc_service.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/monitoring/grpc_service.cpp |69.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |69.1%| [AR] {RESULT} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |69.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/view/grpc_service.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |69.1%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/services/view/grpc_service.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |69.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/view/libydb-services-view.a |69.1%| [AR] {RESULT} $(B)/ydb/services/view/libydb-services-view.a |69.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/view/libydb-services-view.a |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |69.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |69.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |69.2%| [AR] {RESULT} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |69.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |69.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ymq/grpc_service.cpp |69.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ymq/libydb-services-ymq.a |69.2%| [AR] {RESULT} $(B)/ydb/services/ymq/libydb-services-ymq.a |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/grpc_service.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |69.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/row_table.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/row_table.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/transfer_writer.cpp |69.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/transfer_writer.cpp |69.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |69.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |69.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/scheme.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/scheme.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |69.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/column_table.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/column_table.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |69.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |69.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |69.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/purecalc.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/helpers.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/helpers.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/factories.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/factories.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/purecalc_output.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_output.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/main.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/main.cpp |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/transfer/purecalc_input.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_input.cpp |69.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/transfer/libydb-core-transfer.a |69.3%| [AR] {RESULT} $(B)/ydb/core/transfer/libydb-core-transfer.a |69.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |69.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |69.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |69.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |69.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |69.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer_request.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_request.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/runtime.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/runtime.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |69.4%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |69.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |69.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |69.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |69.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |69.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |69.5%| [AR] {RESULT} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |69.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |69.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |69.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |69.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |69.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kafka_proxy/kafka_connection.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |69.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |69.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |69.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |69.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |69.5%| [AR] {RESULT} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |69.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |69.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/appdata.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/appdata.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/basics/services.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/services.cpp |69.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |69.6%| [AR] {RESULT} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |69.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/run.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |69.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |69.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |69.6%| [AR] {RESULT} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |69.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |69.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |69.7%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |69.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |69.7%| [AR] {RESULT} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |69.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_description.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |69.7%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/actor.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |69.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |69.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/activation.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/activation.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |69.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |69.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |69.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |69.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |69.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/libcore-control-lib.a |69.8%| [AR] {RESULT} $(B)/ydb/core/control/lib/libcore-control-lib.a |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |69.8%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |69.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |69.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |69.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |69.8%| [AR] {RESULT} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |69.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |69.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |69.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/librun.a |69.9%| [AR] {RESULT} $(B)/ydb/core/driver_lib/run/librun.a |69.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/librun.a |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |69.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/groups.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/groups.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |69.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |69.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_pipe_req.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_pipe_req.cpp |69.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/common/ss_dialog.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/ss_dialog.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/put_records_actor.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/put_records_actor.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_query.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_query.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |69.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |69.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |69.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |69.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |70.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |70.0%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/executor.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/executor.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |70.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |70.0%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |70.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |70.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |70.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |70.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |70.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |70.0%| [AR] {RESULT} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |70.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |70.0%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_wb_req.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_wb_req.cpp |70.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |70.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |70.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |70.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |70.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |70.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |70.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |70.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |70.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_browse.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_browse.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/discovery/grpc_service.cpp |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/discovery/grpc_service.cpp |70.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/discovery/libydb-services-discovery.a |70.1%| [AR] {RESULT} $(B)/ydb/services/discovery/libydb-services-discovery.a |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/column_families.cpp |70.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |70.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/column_families.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer.cpp |70.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer_topic_data.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_topic_data.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/common/timeout.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/timeout.cpp |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |70.2%| [AR] {RESULT} 
$(B)/ydb/services/metadata/common/libservices-metadata-common.a |70.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |70.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |70.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/garbage.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/garbage.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |70.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |70.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |70.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |70.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |70.3%| 
[AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |70.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/queue_schema.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_schema.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |70.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |70.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |70.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |70.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/purge_queue.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge_queue.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |70.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |70.3%| [AR] {RESULT} 
$(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |70.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |70.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |70.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/lib/auth/auth_helpers.cpp |70.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/auth/auth_helpers.cpp |70.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |70.3%| [AR] {RESULT} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |70.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |70.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/add_index.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_index.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_operation.cpp |70.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |70.4%| [AR] {RESULT} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |70.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_operation.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/query.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/query.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |70.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |70.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |70.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/status.cpp |70.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blob_depot/agent/status.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/metering.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/metering.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/grpc_service.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/grpc_service.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/config/grpc_service.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/grpc_service.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |70.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/config/libydb-services-config.a |70.4%| [AR] {RESULT} $(B)/ydb/services/config/libydb-services-config.a |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |70.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/config/libydb-services-config.a |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |70.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |70.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/dynamic_config/grpc_service.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/grpc_service.cpp |70.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |70.5%| [AR] {RESULT} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |70.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |70.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/pdisks.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/pdisks.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/add_data.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_data.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/request.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/request.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/proxy.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/proxy.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/comm.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/comm.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/receive_message.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/receive_message.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/json_handlers_pq.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pq.cpp |70.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/libydb-core-viewer.a |70.5%| [AR] {RESULT} $(B)/ydb/core/viewer/libydb-core-viewer.a |70.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/vslots.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/vslots.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |70.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |70.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/grpc_service.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/grpc_service.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |70.6%| [AR] {RESULT} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp 
|70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |70.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |70.6%| [AR] {RESULT} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/metrics.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/metrics.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/bridge/grpc_service.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/bridge/grpc_service.cpp |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |70.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |70.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/bridge/libydb-services-bridge.a |70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |70.6%| [AR] {RESULT} $(B)/ydb/services/bridge/libydb-services-bridge.a |70.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/s3.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/s3.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blob_depot/agent/storage_range.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pq.cpp |70.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |70.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/keyvalue/grpc_service.cpp |70.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |70.7%| [AR] {RESULT} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |70.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/read.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/read.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/login_page.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_page.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/blocks.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blocks.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/agent.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/agent.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |70.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |70.7%| [AR] {RESULT} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |70.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/ydb_over_fq.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ydb_over_fq.cpp |70.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |70.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_delivery_problem.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |70.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_request.cpp |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/service/deleting.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/deleting.cpp |70.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |70.9%| [AR] {RESULT} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |70.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/backup/grpc_service.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |70.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/backup/libydb-services-backup.a |70.9%| [AR] 
{BAZEL_UPLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |70.9%| [AR] {RESULT} $(B)/ydb/services/backup/libydb-services-backup.a |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/backup/grpc_service.cpp |70.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_permissions.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_permissions.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |70.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |70.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |70.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/private_grpc.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/private_grpc.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |70.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/libydb-services-fq.a |70.9%| [AR] {RESULT} $(B)/ydb/services/fq/libydb-services-fq.a |70.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |70.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_table_response.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |70.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/auth/grpc_service.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/auth/grpc_service.cpp |71.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/auth/libydb-services-auth.a |71.0%| [AR] {RESULT} $(B)/ydb/services/auth/libydb-services-auth.a |71.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/utils.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/utils.cpp |71.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |71.0%| [AR] {RESULT} 
$(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |71.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/create_user.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_user.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |71.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |71.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |71.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/node_tracker.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/node_tracker.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |71.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |71.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |71.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |71.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |71.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |71.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |71.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |71.1%| [AR] {RESULT} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |71.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |71.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |71.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |71.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |71.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |71.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |71.1%| [AR] {RESULT} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |71.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |71.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |71.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yaml_config/yaml_config.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config.cpp |71.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |71.2%| [AR] {RESULT} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |71.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/events/internal.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/internal.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/tier/object.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/object.cpp |71.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |71.2%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |71.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |71.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |71.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |71.2%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |71.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |71.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |71.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |71.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |71.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |71.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |71.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/describe.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/describe.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/cms/grpc_service.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/grpc_service.cpp |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/libydb-services-cms.a |71.3%| [AR] {RESULT} $(B)/ydb/services/cms/libydb-services-cms.a |71.3%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/services/cms/libydb-services-cms.a |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/program/resolver.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/program/resolver.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/program/libcore-tx-program.a |71.3%| [AR] {RESULT} $(B)/ydb/core/tx/program/libcore-tx-program.a |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |71.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/create_queue.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_queue.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |71.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |71.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ticket_parser.cpp |71.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser.cpp |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/libydb-core-security.a |71.3%| [AR] {RESULT} $(B)/ydb/core/security/libydb-core-security.a |71.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |71.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/libydb-core-security.a |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |71.4%| [AR] {RESULT} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |71.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/locks/locks.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/locks/locks.cpp |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |71.4%| [AR] {RESULT} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |71.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/public_http/http_service.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_service.cpp |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |71.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |71.4%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/libydb-core-public_http.a |71.4%| [AR] {RESULT} $(B)/ydb/core/public_http/libydb-core-public_http.a |71.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |71.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |71.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |71.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |71.4%| [AR] {RESULT} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |71.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/test_client.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/test_client.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |71.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |71.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx.cpp |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/libydb-core-tx.a |71.5%| [AR] {RESULT} $(B)/ydb/core/tx/libydb-core-tx.a |71.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |71.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |71.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |71.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/defs.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/defs.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |71.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |71.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |71.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |71.6%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |71.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |71.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/controller.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/controller.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |71.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |71.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |71.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |71.6%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |71.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |71.6%| [AR] {RESULT} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |71.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |71.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |71.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |71.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |71.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |71.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/logging.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/logging.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/schema.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/schema.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |71.7%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |71.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |71.7%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |71.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/shard_writer.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shard_writer.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |71.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |71.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |71.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |71.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |71.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |71.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |71.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |71.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/replication/controller/dst_creator.cpp |71.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |71.8%| [AR] {RESULT} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |71.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/background_controller.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/background_controller.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |71.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |71.8%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |71.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_guardian.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_guardian.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp 
|71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/write_data.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/write_data.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |71.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |71.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |71.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |71.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/manager.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/manager.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |71.9%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |71.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |71.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |71.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |71.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |71.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_lookup.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_lookup.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |71.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |71.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |72.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |72.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |72.0%| [AR] {RESULT} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |72.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |72.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |72.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/initializer.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/initializer.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/write_actor.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/write_actor.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_users.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_users.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/metadata/object.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/object.cpp |72.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |72.0%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |72.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |72.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_publish.cpp |72.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_publish.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_logstore.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore.cpp |72.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/local_discovery/grpc_service.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/local_discovery/grpc_service.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |72.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |72.1%| [AR] {RESULT} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |72.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/read_quoter.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/read_quoter.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |72.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |72.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |72.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |72.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |72.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |72.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/statestorage_replica.cpp |72.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/base/statestorage_replica.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/appdata.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/appdata.cpp |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |72.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/libydb-core-base.a |72.1%| [AR] {RESULT} $(B)/ydb/core/base/libydb-core-base.a |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/libydb-core-base.a |72.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |72.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |72.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |72.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_user.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_user.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |72.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/base/counters.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/counters.cpp |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |72.2%| [AR] {RESULT} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |72.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |72.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |72.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |72.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |72.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |72.3%| [AR] {RESULT} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |72.3%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |72.3%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |72.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |72.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |72.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |72.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |72.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_scripting.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |72.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |72.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |72.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |72.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |72.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/account_read_quoter.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/account_read_quoter.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |72.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_export.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_export.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/write_quoter.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/write_quoter.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_scheme.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scheme.cpp |72.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |72.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |72.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |72.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_import.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |72.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |72.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_object_storage.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_dummy.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_dummy.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/operations/events.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/events.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |72.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |72.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |72.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_operation.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_operation.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_table.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |72.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |72.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |72.7%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |72.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |72.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/cs_helper.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/cs_helper.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/selector/backup/selector.cpp |72.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |72.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |72.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/common_level.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/common_level.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_debug.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_debug.cpp 
|72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |72.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |72.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |72.8%| [AR] {RESULT} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/services/ydb/ydb_query.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |72.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |72.8%| [AR] {RESULT} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |72.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/storage/tier/storage.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/storage/tier/storage.cpp |72.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |72.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |72.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |72.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |72.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |72.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |72.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/libydb-services-ydb.a |72.9%| [AR] {RESULT} $(B)/ydb/services/ydb/libydb-services-ydb.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |72.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |73.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/mediator/tablet_queue.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/storage/s3/storage.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |73.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_metrics.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_metrics.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |73.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/write.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |73.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |73.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |73.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |73.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |73.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |73.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |73.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/execute_queue.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/execute_queue.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |73.1%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |73.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |73.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |73.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |73.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |73.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |73.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_resolver.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/state_server_interface.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/state_server_interface.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/bootstrapper.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a 
|73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |73.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |73.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |73.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |73.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/sessions/sessions.cpp |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |73.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/sessions/sessions.cpp |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |73.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |73.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |73.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |73.3%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |73.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |73.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_state.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_state.cpp |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/selector/abstract/selector.cpp |73.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |73.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |73.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/grpc_service.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/grpc_service.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |73.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |73.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |73.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |73.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |73.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/resource_broker.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/purge.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator__init.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__init.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_sys.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_sys.cpp |73.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |73.4%| [AR] {RESULT} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_tablet.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_tablet.cpp |73.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |73.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/http/http.cpp |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |73.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |73.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/http.cpp |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |73.5%| [AR] {RESULT} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_reset.cpp |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |73.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_reset.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_settings.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_settings.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |73.5%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |73.5%| [AR] {RESULT} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/common_helper.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/common_helper.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |73.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |73.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |73.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |73.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |73.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/auth_factory.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_write.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_factory.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_write.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_initialize.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_initialize.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |73.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |73.6%| [AR] {RESULT} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |73.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |73.7%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |73.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |73.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/test_shard_context.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_context.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/show_create/show_create.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/show_create.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |73.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |73.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |73.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |73.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |73.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |73.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/list_queues.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queues.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |73.8%| [AR] {RESULT} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/events/events.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/events/events.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |73.8%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |73.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |73.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |73.8%| [AR] {RESULT} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |73.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/node_whiteboard.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_whiteboard.cpp |73.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/libydb-core-tablet.a |73.9%| [AR] {RESULT} $(B)/ydb/core/tablet/libydb-core-tablet.a |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |73.9%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |73.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_message.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_message.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |73.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |73.9%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |73.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |73.9%| [AR] {RESULT} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/http_req.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |73.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |73.9%| [AR] {RESULT} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_req.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |73.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |73.9%| 
[AR] {RESULT} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/tablets/tablets.cpp |73.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/tablets/tablets.cpp |73.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |74.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |74.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/database/database.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/database.cpp |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |74.0%| [AR] {RESULT} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |74.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |74.0%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/auth/owners.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/owners.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_init.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |74.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |74.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |74.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |74.0%| [AR] {RESULT} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |74.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |74.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |74.1%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |74.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |74.1%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/db_counters.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/db_counters.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/writer.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/writer.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |74.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/auth/group_members.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/group_members.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/topic.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |74.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |74.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.1%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tenant_runtime.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tenant_runtime.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |74.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |74.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |74.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |74.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |74.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |74.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |74.2%| [AR] {RESULT} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_collector.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |74.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp 
|74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |74.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |74.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |74.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |74.3%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |74.3%| [AR] 
{BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/processor/processor.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |74.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |74.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |74.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |74.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/auth/users.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/users.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |74.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |74.4%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |74.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |74.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/auth/groups.cpp |74.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/sys_view/auth/groups.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/tx_helpers.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tx_helpers.cpp |74.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/libydb-core-testlib.a |74.4%| [AR] {RESULT} $(B)/ydb/core/testlib/libydb-core-testlib.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |74.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |74.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/auth/permissions.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/permissions.cpp |74.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |74.4%| [AR] {RESULT} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/fetcher.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/fetcher.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |74.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |74.5%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |74.5%| [AR] {RESULT} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/service_impl.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service_impl.cpp |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |74.5%| [AR] {RESULT} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/shred.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/shred.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |74.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |74.5%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/scrub.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/scrub.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp 
|74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/nodes/nodes.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/nodes/nodes.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |74.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |74.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/access.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/grouper.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |74.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |74.6%| [AR] {RESULT} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |74.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |74.6%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/get_group.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/get_group.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |74.6%| 
[CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/initializer.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/initializer.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/node_report.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/node_report.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/checker_secret.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_secret.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/snapshot.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/snapshot.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/generic_manager.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/generic_manager.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/migrate.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/migrate.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/secret.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/initialization.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/initialization.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |74.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/common/config.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/modification.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/modification.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/config.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |74.7%| [AR] {RESULT} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |74.7%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/alter_impl.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter_impl.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |74.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/manager.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/manager.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/restore.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/restore.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |74.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |74.7%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |74.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/checker_access.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_access.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/fetcher.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/fetcher.cpp |74.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |74.8%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/common.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/common.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/object.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/object.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/abstract.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/abstract.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/manager/alter.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |74.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |74.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a 
|74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/object.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/object.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/bsc.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bsc.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/bridge.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bridge.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |74.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |74.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/common.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/common.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |74.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |74.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |74.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |74.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/control.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/control.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |74.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |74.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/initializer.cpp |74.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.9%| [AR] {RESULT} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/initializer.cpp |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |74.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |75.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |75.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/snapshot.cpp |75.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/initializer/snapshot.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/kesus/grpc_service.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/kesus/grpc_service.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.0%| [AR] {RESULT} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |75.0%| [AR] {RESULT} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/abstract/common.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |75.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/common.cpp |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |75.0%| [AR] {RESULT} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |75.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |75.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |75.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |75.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |75.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |75.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |75.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |75.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/config.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/event.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/event.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |75.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/quoter/libydb-core-quoter.a |75.1%| [AR] {RESULT} $(B)/ydb/core/quoter/libydb-core-quoter.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/behaviour.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/behaviour.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |75.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |75.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |75.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |75.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/object_storage.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/service.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/service.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |75.2%| [AR] {RESULT} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.2%| [AR] {RESULT} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/manager.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/manager.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |75.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |75.2%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_tx.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.2%| [AR] {RESULT} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/auth_factory.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_factory.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |75.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |75.2%| [AR] {RESULT} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/restore_unit.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/change_visibility.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/change_visibility.cpp |75.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |75.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |75.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |75.3%| [AR] {RESULT} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_locks.cpp |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/register_node.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/register_node.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |75.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |75.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |75.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |75.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |75.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/events/events.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/events.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/upload_stats.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |75.4%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |75.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |75.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/delete_queue.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_queue.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |75.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |75.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/import_s3.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |75.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.5%| [AR] {RESULT} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |75.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |75.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |75.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |75.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |75.6%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |75.6%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/key_validator.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_scan.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |75.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |75.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |75.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |75.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |75.6%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |75.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |75.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |75.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/operation.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |75.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/follower_edge.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp |75.7%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |75.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |75.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/failure_injection.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/failure_injection.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |75.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execution_unit.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |75.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.8%| [AR] {RESULT} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |75.8%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |75.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |75.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |75.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |75.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |75.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |75.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |75.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |75.9%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/memory_controller/memory_controller.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.9%| [AR] {RESULT} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.9%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_group/main.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/ydbd/main.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydbd/main.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |75.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |75.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |75.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/config/bsconfig_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/memory.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/memory.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |76.0%| [AR] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |76.0%| [AR] {RESULT} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/common.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/common.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/vdisk_write.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/vdisk_write.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/pdisk_read.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_read.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/service_actor.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/service_actor.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/group_write.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/group_write.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/ut_helpers.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |76.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/util/memory_tracker.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/memory_tracker.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/libydb-core-util.a |76.1%| [AR] {RESULT} $(B)/ydb/core/util/libydb-core-util.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/libydb-core-util.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |76.1%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |76.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |76.1%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |76.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |76.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |76.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [AR] {RESULT} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |76.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |76.2%| [AR] {RESULT} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |76.2%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |76.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |76.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |76.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.3%| [AR] {RESULT} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_ut.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |76.3%| [AR] {RESULT} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |76.3%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |76.3%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |76.3%| [AR] {RESULT} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |76.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |76.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/ut_helpers.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |76.4%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |76.4%| [AR] {RESULT} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |76.4%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |76.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |76.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |76.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |76.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |76.5%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_import_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |76.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |76.6%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/table_creator/table_creator_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mon/mon_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/viewer_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |76.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut_common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |76.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |76.7%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |76.7%| [AR] 
{BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/query_actor/query_actor_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |76.7%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |76.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_common.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |76.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/base/board_subscriber_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/viewer/topic_data_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |76.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_counters.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/resource_broker_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp 
|76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |76.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/graph/ut/graph_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp 
|77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_login_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_replay.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |77.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_table_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/main.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/flat_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/flat_ut.cpp |77.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_proccessor.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |77.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |77.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_ut_local.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/health_check/health_check_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |77.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |77.4%| [AR] {RESULT} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |77.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |77.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |77.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_common.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_state_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |77.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/object_storage_listing_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_kqp.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |77.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |77.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/lease_holder.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/lease_holder.cpp |77.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |77.7%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/downtime_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/dynamic_nameserver.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__register_node.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__register_node.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ticket_parser_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp 
|77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/local.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/local.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |77.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |77.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_tenants_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |77.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |77.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/query_compiler.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_query_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ut_ycsb.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/locks_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/locks_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |77.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |78.0%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/apps/etcd_proxy/service/ut/etcd_service_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/ut/etcd_service_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay/main.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/main.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp 
|78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |78.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_large.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |78.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/labels_maintainer.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/labels_maintainer.cpp 
|78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |78.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |78.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/blobsan/main.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/blobsan/main.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/sys_view/ut_labeled.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |78.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |78.3%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_pool.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_pool.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/datastreams/datastreams_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |78.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |78.3%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |78.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_ut_pool.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/control/lib/immediate_control_board_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/immediate_control_board_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |78.4%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |78.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |78.4%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/ydb/ydb_logstore_ut.cpp |78.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |78.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/cms/cms_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/cms_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cluster_info_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |78.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |78.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |78.6%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |78.6%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |78.6%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |78.6%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |78.6%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |78.6%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |78.6%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |78.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |78.7%| [TA] $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.7%| [TA] {RESULT} $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/node_broker.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |78.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/libydb-core-mind.a |78.7%| [AR] {RESULT} $(B)/ydb/core/mind/libydb-core-mind.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/mvp/core/mvp_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |78.7%| [EN] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |78.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |78.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |78.7%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |78.8%| [TA] $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... results_accumulator.log} |78.8%| [TA] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... 
results_accumulator.log} |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/client/cancel_tx_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/console/console_ut_configs.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |78.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |78.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |78.8%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/cms/cms_ut.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |78.8%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |78.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |78.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |78.9%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |78.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |78.9%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a >> RuntimeFeatureFlags::ConversionToProto [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] |78.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::DefaultValues [GOOD] |78.9%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |78.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |78.9%| [AR] {RESULT} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/session.h_serialized.cpp |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/lib/ut/ydb-core-control-lib-ut |78.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |78.9%| [LD] {RESULT} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |78.9%| [LD] {RESULT} $(B)/ydb/core/control/lib/ut/ydb-core-control-lib-ut |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |78.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/ut/ydb-core-control-lib-ut |78.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |78.9%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut/ydb-core-base-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> 
AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |78.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] |78.9%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |78.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] |79.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |79.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp >> ControlImplementationTests::TestControlWrapperAsI64 >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> ControlImplementationTests::TestRegisterSharedControl [GOOD] >> TGuardianImpl::FollowerTracker [GOOD] >> TBlobStorageGroupTypeTest::TestCorrectLayout [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestRegisterSharedControl [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |79.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |79.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |79.0%| [AR] {RESULT} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |79.0%| [TA] $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} >> Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] >> NFulltext::ValidateColumnsMatches [GOOD] >> NFulltext::ValidateSettings [GOOD] >> NFulltext::FillSetting [GOOD] >> NFulltext::FillSettingInvalid [GOOD] >> NFulltext::Analyze [GOOD] >> Path::CanonizeFast [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullSstIt::TestSstIndexSeekAndIterate [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut >> TBlobStorageIngress::IngressPartsWeMustHaveLocally [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressPrintDistribution [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] |79.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |79.0%| [TA] {RESULT} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::CanonizeFast [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |79.0%| [LD] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator1 [GOOD] |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |79.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |79.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/fq/libs/common/cache_ut.cpp >> TBlobStorageIngressMatrix::VectorTestBitwiseAnd [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |79.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/common/cache_ut.cpp >> TBlobStorageIngress::BarrierIngressQuorumBasicMirror3_4_2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] >> TBlobStorageIngressMatrix::VectorTestMinus [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/ut/ydb-core-util-ut >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations [GOOD] >> TStateStorageConfig::UniformityTest >> TBlobStorageHullFreshSegment::PerfAppendix >> SysViewQueryHistory::AggrMerge [GOOD] >> TBlobStorageIngressMatrix::MatrixTest [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] |79.1%| [LD] {RESULT} $(B)/ydb/core/util/ut/ydb-core-util-ut |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> SysViewQueryHistory::StableMerge [GOOD] >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |79.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] >> TopicNameConverterTest::LegacyStyle [GOOD] >> TopicNameConverterTest::FirstClass [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut >> DiscoveryConverterTest::AccountDatabase [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] |79.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times 
[GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::CmWay [GOOD] >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |79.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> BootstrapTabletsValidatorTests::TestNoNodeForTablet [GOOD] >> BootstrapTabletsValidatorTests::TestRequiredTablet [GOOD] >> BootstrapTabletsValidatorTests::TestImportantTablet [GOOD] >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TQueueInplaceTests::DestroyInDestructor [GOOD] >> TQueueInplaceTests::EmplacePopDefault [GOOD] >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> TQueueInplaceTests::PopTooManyTimes [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> TQueueInplaceTests::MoveConstructor [GOOD] |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> TQueueInplaceTests::MoveAssignment [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> 
ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularQueueTest::Empty [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TULID::ParseAndFormat [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TULID::HeadByteOrder [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TULID::TailByteOrder [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TULID::EveryBitOrder [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TULID::Generate [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TWildcardTest::TestWildcards [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> TRegistryTests::TestAddGet [GOOD] >> TRegistryTests::TestCheckConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroQueueWeight [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TWildcardTest::TestWildcards [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] |79.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::ShouldShuffle [GOOD] |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] >> TStateStorageConfig::UniformityTest [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame1 |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] >> TQueueBackpressureTest::PerfInFlight [GOOD] >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TTrackable::TString [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame1 [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame2 >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TPDiskErrorStateTests::Basic [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> TActorTest::TestCreateChildActor [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] >> TActorTest::TestBlockEvents >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> TActorTest::TestBlockEvents [GOOD] >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut >> NameserviceConfigValidatorTests::TestModifyIdForHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyResolveHost [GOOD] >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> TActorTest::TestScheduleEvent |79.1%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp >> TActorTest::TestScheduleEvent [GOOD] >> TActorTest::TestScheduleReaction [GOOD] >> TActorTest::TestWaitForFirstEvent |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |79.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... 
unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... waiting for processed 3 more events (done) >> TActorTest::TestSendFromAnotherThread >> TActorTest::TestWaitForFirstEvent [GOOD] >> TActorTest::TestStateSwitch [GOOD] |79.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame2 [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame3 |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] |79.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |79.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp >> TActorTest::TestWaitFuture |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestStateSwitch [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TActorTest::TestWaitFuture [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... 
waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> TActorTest::TestDie [GOOD] >> TActorTest::TestFilteredGrab >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TActorTest::TestSendAfterDelay [GOOD] |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp >> TActorTest::TestFilteredGrab [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame3 [GOOD] |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TActorTest::TestSendFromAnotherThread [GOOD] >> TActorTest::TestWaitFor |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] >> TQueueBackpressureTest::CreateDelete [GOOD] >> TActorTest::TestWaitFor [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentTabletId [GOOD] >> TLogoBlobTest::LogoBlobParse [GOOD] >> TLogoBlobTest::LogoBlobCompare [GOOD] >> TLogoBlobTest::LogoBlobSort [GOOD] >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD] |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut >> ReadBatcher::Range >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame3 [GOOD] >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut >> NameserviceConfigValidatorTests::TestRemoveTooMany [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] |79.2%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... waiting for value = 42 (done) >> TQueueBackpressureTest::PerfTrivial |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |79.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> TBlobStorageQueueTest::TMessageLost [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SnapshotRollback >> TCowBTreeTest::SnapshotCascade [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::DeleteInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> TQueueBackpressureTest::PerfTrivial [GOOD] >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] |79.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> 
TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedTaskName [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnknownQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnlimitedResource [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] >> TBlobStorageHullStorageRatio::Test [GOOD] >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCowBTreeTest::ClearAndReuse [GOOD] >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] >> ReadBatcher::Range [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TCowBTreeTest::RandomInsertInplace >> TBlobStorageHullDecimal::TestMult [GOOD] >> NameserviceConfigValidatorTests::TestLongWalleDC [GOOD] >> NameserviceConfigValidatorTests::TestModifyClusterUUID [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] |79.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp >> Path::CanonizeOld [GOOD] >> Path::CanonizedStringIsSame1 [GOOD] >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_EnglishAlphabet [GOOD] >> Path::Name_AllSymbols [GOOD] >> Path::Name_ExtraSymbols [GOOD] |79.2%| [AR] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |79.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> TBlobStorageHullDecimal::TestToUi64 [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp >> TIntervalSetTest::IntervalVecTestEmpty |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] |79.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::Name_ExtraSymbols [GOOD] >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAdd [GOOD] |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |79.3%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMult [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> THazardTest::CachedPointers [GOOD] >> THyperLogCounterTest::TestIncrement >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] |79.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |79.3%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |79.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a >> TIntrusiveStackTest::TestPushPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestAddRandom |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> TBTreeTest::Basics [GOOD] >> AddressClassifierTest::TestAddressExtraction [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TBTreeTest::ClearAndReuse >> TBitsTest::TestNaiveClz [GOOD] >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAdd >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetUnion >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersection >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersection 
>> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large >> TMemoryStatsAggregator::Compaction_Single [GOOD] >> TStateStorageConfig::TestReplicaSelection >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TPageMapTest::TestRandom [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |79.3%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalSetDifference |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |79.3%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> TLogoBlobIdHashTest::SimpleTestPartIdDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentSteps [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout >> TStateStorageConfig::TestReplicaSelection [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |79.3%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |79.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |79.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 
167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] |79.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |79.3%| [TA] $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |79.3%| [LD] {RESULT} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |79.3%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |79.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |79.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::SameConfigurationTest [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TDelayedResponsesTests::Test [GOOD] |79.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |79.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> DiscoveryConverterTest::FullLegacyNames [GOOD] >> SysViewQueryHistory::TopDurationAdd [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] >> SysViewQueryHistory::StableMerge2 [GOOD] |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |79.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TStateStorageConfig::SameConfigurationTest [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |79.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |79.4%| [TA] {RESULT} 
$(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp >> SysViewQueryHistory::AggrMergeDedup [GOOD] >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |79.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FirstClass [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] >> TBlobStorageHullFresh::AppendixPerf >> SysViewQueryHistory::AddDedupRandom [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge2 [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedupRandom [GOOD] >> SysViewQueryHistory::AddDedup [GOOD] >> SysViewQueryHistory::AddDedup2 [GOOD] |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedup2 [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut 
|79.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |79.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFresh::Perf |79.4%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |79.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |79.4%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |79.4%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageIngress::Ingress [GOOD] >> TBlobStorageIngress::IngressCacheMirror3 [GOOD] >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/audit/ut/ydb-core-audit-ut |79.4%| [LD] {RESULT} $(B)/ydb/core/audit/ut/ydb-core-audit-ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |79.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |79.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> 
TBlobStorageHullFresh::Perf [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] >> TFlatDatabasePgTest::BasicTypes |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |79.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |79.5%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TFlatDatabasePgTest::BasicTypes [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne |79.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |79.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |79.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe |79.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |79.5%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |79.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |79.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] |79.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |79.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |79.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.6%| [TA] $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] >> TBlobStorageHullCompactDeferredQueueTest::Basic >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] |79.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |79.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] >> TableIndex::CompatibleSecondaryIndex [GOOD] >> TableIndex::NotCompatibleSecondaryIndex [GOOD] >> TableIndex::CompatibleVectorIndex [GOOD] >> TableIndex::NotCompatibleVectorIndex [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame4 [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut/ydb-core-base-ut >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound >> ControlImplementationTests::TestTControl >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] >> ControlImplementationTests::TestTControl [GOOD] >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |79.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame4 [GOOD] |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |79.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp >> TMemoryStatsAggregator::Aggregate_Single [GOOD] >> ReadBatcher::ReadBatcher >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_OneHost [GOOD] >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |79.6%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |79.6%| [LD] {RESULT} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestTControl [GOOD] |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCacheTest::Test2QCache [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |79.6%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |79.6%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] >> PDiskCompatibilityInfo::OldCompatible |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |79.6%| [LD] 
{RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 ExternalConsumption: 306 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 
TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 80 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 65 MemAvailable: 85 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 145 SoftLimit: 165 TargetUtilization: 185 ExternalConsumption: 194 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible >> TPDiskRaces::KillOwnerWhileDeletingChunk |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |79.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> PDiskCompatibilityInfo::Incompatible [GOOD] >> PDiskCompatibilityInfo::NewIncompatibleWithDefault >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TPDiskTest::TestChunkWriteRelease |79.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> ReadBatcher::ReadBatcher [GOOD] >> TYardTest::TestInit |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 
[GOOD] |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] >> PDiskCompatibilityInfo::Trunk |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> PDiskCompatibilityInfo::Trunk [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> PDiskCompatibilityInfo::Migration |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.7%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::TestInit [GOOD] >> TYardTest::TestInitOnIncompleteFormat |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TPDiskUtil::SectorPrint [GOOD] >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool >> PDiskCompatibilityInfo::Migration [GOOD] >> ReadOnlyPDisk::SimpleRestartReadOnly |79.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadOnlyPDisk::SimpleRestartReadOnly [GOOD] >> ReadOnlyPDisk::StartReadOnlyUnformattedShouldFail [GOOD] >> ReadOnlyPDisk::StartReadOnlyZeroedShouldFail >> ReadOnlyPDisk::StartReadOnlyZeroedShouldFail [GOOD] >> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> TYardTest::TestInitOwner |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |79.7%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |79.7%| [LD] {RESULT} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut >> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk [GOOD] >> ReadOnlyPDisk::ReadOnlyPDiskEvents >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestIncorrectRequests >> WilsonTrace::LogWriteChunkWriteChunkRead |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |79.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::TestBadDeviceInit >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestLogWriteRead |79.7%| [TA] $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TYardTest::TestEmptyLogRead >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> TPDiskTest::TestPDiskActorErrorState |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::TestChunkReadRandomOffset >> TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestChunkWriteRead >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart >> WilsonTrace::LogWriteChunkWriteChunkRead [GOOD] >> TYardTest::TestWholeLogRead >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/partcheck/partcheck |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> TPDiskUtil::TestBufferPool [GOOD] >> TPDiskUtil::SectorMap >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> TYardTest::TestWholeLogRead [GOOD] |79.7%| [LD] {RESULT} $(B)/ydb/tools/partcheck/partcheck >> TYardTest::TestSysLogReordering |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |79.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |79.7%| [TA] {RESULT} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopBroken >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector >> TPDiskTest::TestPDiskActorPDiskStopBroken [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopUninitialized |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> ControlImplementationTests::TestRegisterLocalControl [GOOD] |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> TPDiskTest::TestPDiskActorPDiskStopUninitialized [GOOD] >> TPDiskTest::TestPDiskOwnerRecreation >> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD] >> TYardTest::TestLogWriteReadLarge >> TYardTest::TestLogWriteReadLarge [GOOD] >> TYardTest::TestLogWriteCutEqual |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TPDiskTest::TestChunkWriteRelease [GOOD] >> TYardTest::TestChunkWriteRead [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts >> TYardTest::TestChunkWriteReadWithHddSectorMap |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |79.7%| [LD] {RESULT} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut >> ReadOnlyPDisk::ReadOnlyPDiskEvents [GOOD] >> ShredPDisk::EmptyShred |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |79.7%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut >> ControlImplementationTests::TestControlWrapperBounds [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestRegisterLocalControl [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |79.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf >> ShredPDisk::EmptyShred [GOOD] >> ShredPDisk::SimpleShred >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart >> ControlImplementationTests::TestParallelRegisterSharedControl |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/tools/decrypt/decrypt |79.7%| [LD] {RESULT} $(B)/ydb/core/backup/tools/decrypt/decrypt |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestControlWrapperBounds [GOOD] |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> ShredPDisk::SimpleShred [GOOD] >> ShredPDisk::SimpleShredRepeat >> TPDiskTest::TestPDiskOwnerRecreation [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner >> 
TBlobStorageBarriersTreeTest::Tree [GOOD] |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |79.7%| [AR] {RESULT} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a >> TBlobStorageCompStrat::Test1 >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump >> TBlobStorageCompStrat::Test1 [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a >> ShredPDisk::SimpleShredRepeat [GOOD] >> ShredPDisk::SimpleShredDirtyChunks |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/audit/ut/ydb-core-audit-ut >> ShredPDisk::SimpleShredDirtyChunks [GOOD] >> ShredPDisk::KillVDiskWhilePreShredding |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |79.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/partcheck/partcheck >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump |79.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |79.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |79.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation >> ShredPDisk::KillVDiskWhilePreShredding [GOOD] >> ShredPDisk::KillVDiskWhileShredding >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe >> DSProxyStrategyTest::Restore_block42 |79.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite >> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD] >> TPDiskTest::TestVDiskMock >> ShredPDisk::KillVDiskWhileShredding [GOOD] >> ShredPDisk::InitVDiskAfterShredding |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/lib/ut/unittest |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> DSProxyStrategyTest::Restore_mirror3dc >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/btree_benchmark/btree_benchmark >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys >> ShredPDisk::InitVDiskAfterShredding [GOOD] >> ShredPDisk::ReinitVDiskWhilePreShredding >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom |79.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/surg/surg >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead |79.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp >> ShredPDisk::ReinitVDiskWhilePreShredding [GOOD] >> ShredPDisk::ReinitVDiskWhileShredding >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead >> TBtreeIndexTPartLarge::SmallKeys1GB |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector |79.8%| [TA] $(B)/ydb/core/control/lib/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.8%| [TA] {RESULT} $(B)/ydb/core/control/lib/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ShredPDisk::ReinitVDiskWhileShredding [GOOD] >> ShredPDisk::RetryPreShredCompactError |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> ShredPDisk::RetryPreShredCompactError [GOOD] >> ShredPDisk::RetryShredError |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/tools/decrypt/decrypt |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut >> TPDiskTest::TestRealFile [GOOD] >> TPDiskTest::TestSIGSEGVInTUndelivered >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ExternalSourceBuilderTest::ValidateName [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithoutCondition [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite >> ExternalSourceBuilderTest::ValidateAuthWithCondition [GOOD] >> ExternalSourceBuilderTest::ValidateUnsupportedField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldOnCondition [GOOD] >> IcebergDdlTest::HiveCatalogWithS3Test [GOOD] >> IcebergDdlTest::HadoopCatalogWithS3Test [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::FailedOptionalTypeValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] >> ShredPDisk::RetryShredError [GOOD] |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large 
|79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys >> TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> DatabaseConfigValidation::AllowedFields [GOOD] >> DatabaseConfigValidation::NotAllowedFields [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] >> TPDiskTest::WrongPDiskKey |79.8%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector >> TPDiskTest::WrongPDiskKey [GOOD] >> TPDiskTest::TestStartEncryptedOrPlainAndRestart |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] |79.8%| [TS] {RESULT} ydb/core/external_sources/ut/unittest |79.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.8%| [TS] {RESULT} ydb/library/yql/providers/s3/provider/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::RetryShredError [GOOD] Test command err: 2025-09-25T16:14:42.959327Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:42.966277Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14737012939665631519 MagicNextLogChunkReference: 17878231945205503342 MagicLogChunk: 15649018431463672787 MagicDataChunk: 13620963934613873555 MagicSysLogChunk: 357107827689437668 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816882855350 (2025-09-25T16:14:42.855350Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:42.968913Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:42.973775Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:42.974004Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:42.975652Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:42.975945Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1371174 CutLogId# [1:7554060991940143822:2050] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:42.976357Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:42.977014Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1278} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2025-09-25T16:14:42.977448Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] GroupSizeInUnits: 0 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1371174 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 
2025-09-25T16:14:42.982987Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.004949Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14737012939665631519 MagicNextLogChunkReference: 17878231945205503342 MagicLogChunk: 15649018431463672787 MagicDataChunk: 13620963934613873555 MagicSysLogChunk: 357107827689437668 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816882855350 (2025-09-25T16:14:42.855350Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.016947Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1059671 NonceLog# 1371174 NonceData# 1135195} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:43.022872Z node 1 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:43.022906Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2025-09-25T16:14:43.022930Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.025164Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:43.033084Z node 1 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:43.171430Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.188921Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12439380112456399636 MagicNextLogChunkReference: 11353895956712049312 MagicLogChunk: 16791382849778034911 MagicDataChunk: 8404839138219958177 MagicSysLogChunk: 6598342769009521827 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883102082 (2025-09-25T16:14:43.102082Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | 
EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.205061Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:43.208921Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:43.208952Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.221519Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:43.221640Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1267641 CutLogId# [2:7554060992166654196:2050] ownerRound# 4 PDiskId# 1 2025-09-25T16:14:43.225025Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.233386Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1278} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2025-09-25T16:14:43.233610Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [1:_:0:0:0] GroupSizeInUnits: 0 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1267641 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-09-25T16:14:43.239341Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.261431Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12439380112456399636 MagicNextLogChunkReference: 11353895956712049312 MagicLogChunk: 16791382849778034911 MagicDataChunk: 8404839138219958177 MagicSysLogChunk: 6598342769009521827 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883102082 (2025-09-25T16:14:43.102082Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.264985Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1854042 NonceLog# 
1267641 NonceData# 1425080} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [1:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:43.279641Z node 2 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl_log.cpp:485} Incompatible version ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 26 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } } PDiskId# 1 2025-09-25T16:14:43.446569Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.468916Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 7287073135978419133 MagicNextLogChunkReference: 16671508809983646881 MagicLogChunk: 10457310420939453495 MagicDataChunk: 6405915325194446939 MagicSysLogChunk: 13132497952276003234 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883375988 (2025-09-25T16:14:43.375988Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.480968Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogR ... wnerRound# 31 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:52.420596Z node 20 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4404: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-09-25T16:14:52.468048Z node 20 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 4} PDiskId# 1 2025-09-25T16:14:52.468374Z node 20 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4404: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:390 2025-09-25T16:14:52.468853Z node 20 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4147: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-09-25T16:14:52.609515Z node 20 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4245: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-09-25T16:14:52.609667Z node 20 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4448: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:52.609669Z node 20 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4453: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-09-25T16:14:52.786486Z node 21 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:52.800936Z node 21 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 88099105580849790 MagicNextLogChunkReference: 17327473957133170075 MagicLogChunk: 15088523128710431067 MagicDataChunk: 2595896659137490988 MagicSysLogChunk: 11401549735039187678 
MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816892692341 (2025-09-25T16:14:52.692341Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:52.805479Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:52.809073Z node 21 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:52.809098Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:52.811069Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:52.813032Z node 21 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [12:_:0:0:0] FirstNonceToKeep# 1224547 CutLogId# [21:7554061032525360673:2050] ownerRound# 32 PDiskId# 1 2025-09-25T16:14:52.825201Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:52.856857Z node 21 :BS_PDISK_SHRED CRIT: blobstorage_pdisk_impl.cpp:4472: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:52.856881Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4185: PDisk# 1 sends compact request to VDisk# [12:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-09-25T16:14:52.856890Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4199: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [12:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" 2025-09-25T16:14:52.864849Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4529: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 32 Status# ERROR ShredGeneration# 1 ErrorReason# } 2025-09-25T16:14:52.864859Z node 21 :BS_PDISK_SHRED ERROR: blobstorage_pdisk_impl.cpp:4573: Shred request failed at PDisk# 1 for shredGeneration# 1 because owner# 3 ownerRound# 32 replied with PreShredCompactVDiskResult status# ERROR and ErrorReason# 2025-09-25T16:14:52.872868Z node 21 :BS_PDISK_SHRED CRIT: blobstorage_pdisk_impl.cpp:4472: ProcessShredPDisk with 
IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:52.872883Z node 21 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4504: Retrying a failed shred at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:52.872896Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4185: PDisk# 1 sends compact request to VDisk# [12:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-09-25T16:14:52.872909Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4199: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [12:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:390 2025-09-25T16:14:52.880929Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4529: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 32 Status# OK ShredGeneration# 1 ErrorReason# } 2025-09-25T16:14:52.880950Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4209: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:53.030401Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4404: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-09-25T16:14:53.030838Z node 21 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4147: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-09-25T16:14:53.416377Z node 22 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:53.436976Z node 22 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 492601167364391262 MagicNextLogChunkReference: 9191577249627248587 MagicLogChunk: 2770829393283364886 MagicDataChunk: 4212596175504475098 MagicSysLogChunk: 12580164544194080865 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816893360420 (2025-09-25T16:14:53.360420Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:53.445172Z node 22 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:53.448915Z node 22 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:53.448945Z node 22 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | 
DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:53.456910Z node 22 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:53.460910Z node 22 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [13:_:0:0:0] FirstNonceToKeep# 1822549 CutLogId# [22:7554061038503001862:2050] ownerRound# 33 PDiskId# 1 2025-09-25T16:14:53.463929Z node 22 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:53.488849Z node 22 :BS_PDISK_SHRED CRIT: blobstorage_pdisk_impl.cpp:4472: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:53.488867Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4185: PDisk# 1 sends compact request to VDisk# [13:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-09-25T16:14:53.488875Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4199: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [13:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:390 2025-09-25T16:14:53.493174Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4529: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 33 Status# OK ShredGeneration# 1 ErrorReason# } 2025-09-25T16:14:53.493195Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4209: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:53.715472Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4404: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-09-25T16:14:53.715870Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4147: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-09-25T16:14:53.905309Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4245: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-09-25T16:14:53.905382Z node 22 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4448: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:53.905383Z node 22 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4453: Shred request is finished at PDisk# 1 ShredGeneration# 1 |79.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> TPDiskTest::PDiskRestart |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/ut/unittest >> DatabaseConfigValidation::NotAllowedFields [GOOD] |79.8%| [TS] {RESULT} ydb/core/config/validation/ut/unittest >> TPGTest::TestLogin >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap >> TPGTest::TestLogin [GOOD] >> TPDiskTest::PDiskRestart [GOOD] >> TPDiskTest::PDiskOwnerSlayRace >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector >> ClosedIntervalSet::Union >> AuthConfigValidation::AcceptValidPasswordComplexity [GOOD] >> AuthConfigValidation::CannotAcceptInvalidPasswordComplexity [GOOD] >> AuthConfigValidation::AcceptValidAccountLockoutConfig [GOOD] >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] >> TBlockDeviceTest::WriteReadRestart [GOOD] >> TChunkTrackerTest::AddRemove [GOOD] >> TChunkTrackerTest::TwoOwnersInterference [GOOD] >> TChunkTrackerTest::AddOwnerWithWeight [GOOD] >> TChunkTrackerTest::ZeroWeight [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::Simple [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> TPDiskConfig::GetOwnerWeight [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart >> TPDiskTest::PDiskOwnerSlayRace [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2025-09-25T16:14:54.995999Z :PGWIRE INFO: sock_listener.cpp:66: Listening on [::]:18403 2025-09-25T16:14:55.009024Z :PGWIRE DEBUG: pg_connection.cpp:61: (#13,[::1]:44486) incoming connection opened 2025-09-25T16:14:55.009082Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:44486) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2025-09-25T16:14:55.009113Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:44486) <- [1] 'R' "Auth" Size(4) OK |79.8%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TArrowPushDown::SimplePushDown [GOOD] >> TCowBTreeTest::MultipleSnapshots >> TArrowPushDown::FilterEverything [GOOD] >> TArrowPushDown::MatchSeveralRowGroups [GOOD] >> TYardTest::TestSysLogReordering [GOOD] >> TYardTest::TestStartingPoints >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/auth_config_validator_ut/unittest >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |79.8%| [TS] {RESULT} ydb/core/config/validation/auth_config_validator_ut/unittest >> PushdownTest::NoFilter |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |79.8%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut >> PushdownTest::NoFilter [GOOD] >> PushdownTest::Equal [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart [GOOD] |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sequenceshard/public/ut/unittest |79.8%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |79.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> PushdownTest::NotEqualInt32Int64 [GOOD] >> PushdownTest::TrueCoalesce >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex |79.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::MatchSeveralRowGroups [GOOD] |79.8%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest >> PushdownTest::TrueCoalesce [GOOD] >> PushdownTest::CmpInt16AndInt32 [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> PushdownTest::PartialAnd [GOOD] >> AuditLogHeartbeatTest::LoggingHeartbeat >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] >> TPDiskTest::TestStartEncryptedOrPlainAndRestart [GOOD] >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] >> TPDiskUtil::Light [GOOD] >> TPDiskUtil::LightOverflow >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader >> PushdownTest::NotNull [GOOD] >> PushdownTest::NotNullForDatetime [GOOD] >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius >> PushdownTest::IsNull [GOOD] >> PushdownTest::StringFieldsNotSupported [GOOD] >> Mvp::OpenIdConnectExchangeNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail >> PushdownTest::StringFieldsNotSupported2 [GOOD] >> PushdownTest::RegexpPushdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax >> Mvp::TokenatorGetMetadataTokenGood >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood >> TestUrlBuilder::Basic [GOOD] >> TestUrlBuilder::BasicWithEncoding [GOOD] >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> TestUrlBuilder::UriOnly [GOOD] >> 
TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax >> TMicrosecondsSlidingWindow::Basic [GOOD] |79.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |79.9%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a >> BlobTest::Flags_HasUncompressedSize [GOOD] >> BlobTest::Flags_HasPartData [GOOD] >> BlobTest::Flags_HasCreateTimestamp [GOOD] >> TTypeCodecsTest::TestBoolCodec >> BlobTest::Flags_HasKinesisData [GOOD] >> BlobTest::Flags_HasWriteTimestamp [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardLogLatency |79.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |79.9%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 2025-09-25T16:14:55.359862Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:55.384928Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 10352472175623996479 MagicNextLogChunkReference: 2180952050549905605 MagicLogChunk: 15759370177030357600 MagicDataChunk: 4143189942708804479 MagicSysLogChunk: 6250188294816936261 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816895301876 (2025-09-25T16:14:55.301876Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:55.397331Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:55.400896Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:55.401107Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 
2025-09-25T16:14:55.401778Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:55.404905Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1782598 CutLogId# [1:7554061046613672148:2050] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:55.413115Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:55.424936Z node 1 :BS_PDISK_SHRED CRIT: blobstorage_pdisk_impl.cpp:4472: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:55.424962Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4185: PDisk# 1 sends compact request to VDisk# [0:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-09-25T16:14:55.424973Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4199: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [0:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:390 2025-09-25T16:14:55.425106Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4529: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 2 Status# OK ShredGeneration# 1 ErrorReason# } 2025-09-25T16:14:55.425116Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4209: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:55.864249Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4404: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-09-25T16:14:55.864788Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4147: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-09-25T16:14:56.082574Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4245: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-09-25T16:14:56.082647Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4448: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:56.082649Z node 1 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4453: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-09-25T16:14:56.083017Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] GroupSizeInUnits: 0 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 3 FirstNonceToKeep: 1782598 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 3}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 2} Free ChunkIds: {1, 3..982} PDiskId# 1 2025-09-25T16:14:56.100532Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:56.116438Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 
10352472175623996479 MagicNextLogChunkReference: 2180952050549905605 MagicLogChunk: 15759370177030357600 MagicDataChunk: 4143189942708804479 MagicSysLogChunk: 6250188294816936261 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816895301876 (2025-09-25T16:14:55.301876Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:56.126969Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1414841 NonceLog# 1816272 NonceData# 1959629} LogHeadChunkIdx# 2 LogHeadChunkPreviousNonce# 1815874 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:56.145735Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 397 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 1626112} PDiskId# 1 2025-09-25T16:14:56.145773Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 1626112} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:56.160847Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:56.160956Z node 1 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:56.160983Z node 1 :BS_PDISK_SHRED DEBUG: blobstorage_pdisk_impl.cpp:4448: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-09-25T16:14:56.160986Z node 1 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4453: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-09-25T16:14:56.169158Z node 1 :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:837} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 2 SectorIdx# 398 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 2 LogEndSectorIdx# 398 PDiskId# 1 2025-09-25T16:14:56.169185Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2025-09-25T16:14:56.173424Z node 1 :BS_PDISK_SHRED CRIT: blobstorage_pdisk_impl.cpp:4472: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-09-25T16:14:56.173435Z node 1 :BS_PDISK_SHRED NOTICE: blobstorage_pdisk_impl.cpp:4497: Registered one more shred requester at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} |79.9%| [TA] 
$(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> Mvp::OpenIdConnectAllowedHostsList [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie |79.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> EncryptedFileSerializerTest::WrongParametersForSerializer [GOOD] >> EncryptedFileSerializerTest::SplitOnBlocks >> EncryptedFileSerializerTest::WrongParametersForDeserializer [GOOD] >> EncryptedFileSerializerTest::SerializeWholeFileAtATime [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] >> Mvp::OidcImpersonationStartFlow [GOOD] >> Mvp::OidcImpersonationStartNeedServiceAccountId >> EncryptedFileSerializerTest::SplitOnBlocks [GOOD] >> EncryptedFileSerializerTest::EmptyFile [GOOD] >> EncryptedFileSerializerTest::ReadPartial [GOOD] >> EncryptedFileSerializerTest::DeleteLastByte [GOOD] >> EncryptedFileSerializerTest::AddByte [GOOD] >> EncryptedFileSerializerTest::RemoveLastBlock [GOOD] >> EncryptedFileSerializerTest::ChangeAnyByte [GOOD] >> EncryptedFileSerializerTest::BigHeaderSize [GOOD] >> EncryptedFileSerializerTest::BigBlockSize [GOOD] >> EncryptedFileSerializerTest::RestoreFromState [GOOD] >> EncryptedFileSerializerTest::IVSerialization [GOOD] >> PathsNormalizationTest::NormalizeItemPath [GOOD] >> PathsNormalizationTest::NormalizeItemPrefix [GOOD] >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] >> TestFederatedQueryHelpers::TestCheckNestingDepth [GOOD] >> TestFederatedQueryHelpers::TestTruncateIssues [GOOD] >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/common/ut/unittest >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> Mvp::OidcImpersonationStartNeedServiceAccountId [GOOD] >> Mvp::OidcImpersonationStopFlow [GOOD] >> Mvp::OidcImpersonatedAccessToProtectedResource |79.9%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> Mvp::OidcImpersonatedAccessToProtectedResource [GOOD] >> Mvp::OidcImpersonatedAccessNotAuthorized |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/common/ut/unittest >> TMicrosecondsSlidingWindow::Basic [GOOD] |79.9%| [TS] {RESULT} ydb/core/persqueue/common/ut/unittest >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseYandex [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseNebius [GOOD] >> Mvp::OidcWhoami200 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::RegexpPushdown [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) 
(Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterLoading table meta for: `test_cluster`.`test_table`Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! $4 $3)) ) OptionalIf over Bool 'trueExpr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) FlatMap with JustExpr: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) PhysicalOptimizer-TrimReadWorldExpr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) ResPullExpr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! 
$2 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($32) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Double)) (let $8 (DataType 'DyNumber)) (let $9 (DataType 'Float)) (let $10 (DataType 'Int16)) (let $11 (DataType 'Int32)) (let $12 (DataType 'Int64)) (let $13 (DataType 'Int8)) (let $14 (DataType 'Interval)) (let $15 (DataType 'Json)) (let $16 (DataType 'JsonDocument)) (let $17 (DataType 'String)) (let $18 (DataType 'Timestamp)) (let $19 (DataType 'TzDate)) (let $20 (DataType 'TzDatetime)) (let $21 (DataType 'TzTimestamp)) (let $22 (DataType 'Uint16)) (let $23 (DataType 'Uint32)) (let $24 (DataType 'Uint64)) (let $25 (DataType 'Uint8)) (let $26 (DataType 'Utf8)) (let $27 (DataType 'Uuid)) (let $28 (DataType 'Yson)) (let $29 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_double" $7) '('"col_dynumber" $8) '('"col_float" $9) '('"col_int16" $10) '('"col_int32" $11) '('"col_int64" $12) '('"col_int8" $13) '('"col_interval" $14) '('"col_json" $15) '('"col_json_document" $16) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (OptionalType $18)) '('"col_optional_tz_date" (OptionalType $19)) '('"col_optional_tz_datetime" (OptionalType $20)) '('"col_optional_tz_timestamp" (OptionalType $21)) '('"col_optional_uint16" (OptionalType $22)) '('"col_optional_uint32" (OptionalType $23)) '('"col_optional_uint64" (OptionalType $24)) '('"col_optional_uint8" (OptionalType $25)) '('"col_optional_utf8" (OptionalType $26)) '('"col_optional_uuid" (OptionalType $27)) '('"col_optional_yson" (OptionalType $28)) '('"col_string" $17) '('"col_timestamp" $18) '('"col_tz_date" $19) '('"col_tz_datetime" $20) '('"col_tz_timestamp" $21) 
'('"col_uint16" $22) '('"col_uint32" $23) '('"col_uint64" $24) '('"col_uint8" $25) '('"col_utf8" $26) '('"col_uuid" $27) '('"col_yson" $28))) (let $30 (DqSourceWrap $3 (DataSource '"generic" '"test_cluster") $29)) (let $31 (ResPull! world $1 (Key) $30 '('('type)) '"generic")) (return (Commit! $31 $1)) ) Dq source filter settings: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (== (Member $row '"col_int16") (Int16 '42)) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterLoading table meta for: `test_cluster`.`test_table`Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) ( ... 
rors" (DataType 'Bool)) '('"LongestMatch" (DataType 'Bool)) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" (DataType 'Bool)) '('"NeverNl" (DataType 'Bool)) '('"OneLine" (DataType 'Bool)) '('"PerlClasses" (DataType 'Bool)) '('"PosixSyntax" (DataType 'Bool)) '('"Utf8" (DataType 'Bool)) '('"WordBoundary" (DataType 'Bool)))))) (VoidType) '"" $8 (TupleType $7 (OptionalType (StructType '('"CaseSensitive" $6) '('"DotNl" $6) '('"Literal" $6) '('"LogErrors" $6) '('"LongestMatch" $6) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $6) '('"NeverNl" $6) '('"OneLine" $6) '('"PerlClasses" $6) '('"PosixSyntax" $6) '('"Utf8" $6) '('"WordBoundary" $6)))) '"" '())) (return (OptionalIf (Apply $9 (Just (Member $5 '"col_string"))) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (DataType 'Bool)) (let $7 (DataType 'String)) (let $8 (CallableType '() '($6) '((OptionalType $7)))) (let $9 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing (OptionalType (StructType '('"CaseSensitive" (DataType 'Bool)) '('"DotNl" (DataType 'Bool)) '('"Literal" (DataType 'Bool)) '('"LogErrors" (DataType 'Bool)) '('"LongestMatch" (DataType 'Bool)) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" (DataType 'Bool)) '('"NeverNl" (DataType 'Bool)) '('"OneLine" (DataType 'Bool)) '('"PerlClasses" (DataType 'Bool)) '('"PosixSyntax" (DataType 'Bool)) '('"Utf8" (DataType 'Bool)) '('"WordBoundary" (DataType 'Bool)))))) (VoidType) '"" $8 (TupleType $7 (OptionalType (StructType '('"CaseSensitive" $6) '('"DotNl" $6) '('"Literal" $6) '('"LogErrors" $6) '('"LongestMatch" $6) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $6) '('"NeverNl" $6) '('"OneLine" $6) '('"PerlClasses" $6) '('"PosixSyntax" $6) '('"Utf8" $6) '('"WordBoundary" $6)))) '"" '())) (return (OptionalIf (Apply $9 (Just (Member $5 '"col_string"))) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (DataType 'Bool)) (let $7 (OptionalType (StructType '('"CaseSensitive" $6) '('"DotNl" $6) '('"Literal" $6) '('"LogErrors" $6) '('"LongestMatch" $6) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $6) '('"NeverNl" $6) '('"OneLine" $6) '('"PerlClasses" $6) '('"PosixSyntax" $6) '('"Utf8" $6) '('"WordBoundary" $6)))) (let $8 (DataType 'String)) (let $9 (CallableType '() '($6) '((OptionalType $8)))) (let $10 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $7)) (VoidType) '"" $9 (TupleType $8 $7) '"" '())) (return (OptionalIf (Apply $10 (Just (Member $5 '"col_string"))) $5)) )))) '('('type)))) (return (Commit! 
$3 $2)) ) PhysicalOptimizer-TrimReadWorldPush filter lambda: ( (return (lambda '($1) (block '( (let $2 (DataType 'Bool)) (let $3 (OptionalType (StructType '('"CaseSensitive" $2) '('"DotNl" $2) '('"Literal" $2) '('"LogErrors" $2) '('"LongestMatch" $2) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $2) '('"NeverNl" $2) '('"OneLine" $2) '('"PerlClasses" $2) '('"PosixSyntax" $2) '('"Utf8" $2) '('"WordBoundary" $2)))) (let $4 (DataType 'String)) (let $5 (CallableType '() '($2) '((OptionalType $4)))) (let $6 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $3)) (VoidType) '"" $5 (TupleType $4 $3) '"" '())) (return (Apply $6 (Just (Member $1 '"col_string")))) )))) ) PhysicalOptimizer-PushFilterToReadTableExpr: ( (let $1 (DataSink '"result")) (let $2 (DataType 'Bool)) (let $3 (OptionalType (StructType '('"CaseSensitive" $2) '('"DotNl" $2) '('"Literal" $2) '('"LogErrors" $2) '('"LongestMatch" $2) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $2) '('"NeverNl" $2) '('"OneLine" $2) '('"PerlClasses" $2) '('"PosixSyntax" $2) '('"Utf8" $2) '('"WordBoundary" $2)))) (let $4 (DataType 'String)) (let $5 (CallableType '() '($2) '((OptionalType $4)))) (let $6 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $3)) (VoidType) '"" $5 (TupleType $4 $3) '"" '())) (let $7 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($8) (Apply $6 (Just (Member $8 '"col_string")))))) (lambda '($9) (OptionalIf (Apply $6 (Just (Member $9 '"col_string"))) $9))) '('('type)))) (return (Commit! $7 $1)) ) Expr: ( (let $1 (DataSink '"result")) (let $2 (DataType 'Bool)) (let $3 (OptionalType (StructType '('"CaseSensitive" $2) '('"DotNl" $2) '('"Literal" $2) '('"LogErrors" $2) '('"LongestMatch" $2) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $2) '('"NeverNl" $2) '('"OneLine" $2) '('"PerlClasses" $2) '('"PosixSyntax" $2) '('"Utf8" $2) '('"WordBoundary" $2)))) (let $4 (DataType 'String)) (let $5 (CallableType '() '($2) '((OptionalType $4)))) (let $6 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $3)) (VoidType) '"" $5 (TupleType $4 $3) '"" '())) (let $7 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($8) (Apply $6 (Just (Member $8 '"col_string")))))) (lambda '($9) (OptionalIf (Apply $6 (Just (Member $9 '"col_string"))) $9))) '('('type)))) (return (Commit! $7 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (DataType 'Bool)) (let $3 (OptionalType (StructType '('"CaseSensitive" $2) '('"DotNl" $2) '('"Literal" $2) '('"LogErrors" $2) '('"LongestMatch" $2) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $2) '('"NeverNl" $2) '('"OneLine" $2) '('"PerlClasses" $2) '('"PosixSyntax" $2) '('"Utf8" $2) '('"WordBoundary" $2)))) (let $4 (DataType 'String)) (let $5 (CallableType '() '($2) '((OptionalType $4)))) (let $6 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $3)) (VoidType) '"" $5 (TupleType $4 $3) '"" '())) (let $7 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($8) (Apply $6 (Just (Member $8 '"col_string")))))) (lambda '($9) (OptionalIf (Apply $6 (Just (Member $9 '"col_string"))) $9))) '('('type)))) (return (Commit! $7 $1)) ) Push filter. 
Lambda is already not emptyOptimized expr: ( (let $1 (DataSink '"result")) (let $2 (DataType 'Bool)) (let $3 (OptionalType (StructType '('"CaseSensitive" $2) '('"DotNl" $2) '('"Literal" $2) '('"LogErrors" $2) '('"LongestMatch" $2) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $2) '('"NeverNl" $2) '('"OneLine" $2) '('"PerlClasses" $2) '('"PosixSyntax" $2) '('"Utf8" $2) '('"WordBoundary" $2)))) (let $4 (DataType 'String)) (let $5 (CallableType '() '($2) '((OptionalType $4)))) (let $6 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $3)) (VoidType) '"" $5 (TupleType $4 $3) '"" '())) (let $7 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($8) (Apply $6 (Just (Member $8 '"col_string")))))) (lambda '($9) (OptionalIf (Apply $6 (Just (Member $9 '"col_string"))) $9))) '('('type)))) (return (Commit! $7 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (DataType 'Bool)) (let $4 (OptionalType (StructType '('"CaseSensitive" $3) '('"DotNl" $3) '('"Literal" $3) '('"LogErrors" $3) '('"LongestMatch" $3) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $3) '('"NeverNl" $3) '('"OneLine" $3) '('"PerlClasses" $3) '('"PosixSyntax" $3) '('"Utf8" $3) '('"WordBoundary" $3)))) (let $5 (DataType 'String)) (let $6 (OptionalType $5)) (let $7 (CallableType '() '($3) '($6))) (let $8 (Udf '"Re2.Grep" '((String '"\\\\d+") (Nothing $4)) (VoidType) '"" $7 (TupleType $5 $4) '"" '())) (let $9 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($37) (Apply $8 (Just (Member $37 '"col_string")))))) (let $10 (DataType 'Bool)) (let $11 (DataType 'Date)) (let $12 (DataType 'Datetime)) (let $13 (DataType 'Double)) (let $14 (DataType 'DyNumber)) (let $15 (DataType 'Float)) (let $16 (DataType 'Int16)) (let $17 (DataType 'Int32)) (let $18 (DataType 'Int64)) (let $19 (DataType 'Int8)) (let $20 (DataType 'Interval)) (let $21 (DataType 'Json)) (let $22 (DataType 'JsonDocument)) (let $23 (DataType 'Timestamp)) (let $24 (DataType 'TzDate)) (let $25 (DataType 'TzDatetime)) (let $26 (DataType 'TzTimestamp)) (let $27 (DataType 'Uint16)) (let $28 (DataType 'Uint32)) (let $29 (DataType 'Uint64)) (let $30 (DataType 'Uint8)) (let $31 (DataType 'Utf8)) (let $32 (DataType 'Uuid)) (let $33 (DataType 'Yson)) (let $34 (StructType '('"col_bool" $10) '('"col_date" $11) '('"col_datetime" $12) '('"col_double" $13) 
'('"col_dynumber" $14) '('"col_float" $15) '('"col_int16" $16) '('"col_int32" $17) '('"col_int64" $18) '('"col_int8" $19) '('"col_interval" $20) '('"col_json" $21) '('"col_json_document" $22) '('"col_optional_bool" (OptionalType $10)) '('"col_optional_date" (OptionalType $11)) '('"col_optional_datetime" (OptionalType $12)) '('"col_optional_double" (OptionalType $13)) '('"col_optional_dynumber" (OptionalType $14)) '('"col_optional_float" (OptionalType $15)) '('"col_optional_int16" (OptionalType $16)) '('"col_optional_int32" (OptionalType $17)) '('"col_optional_int64" (OptionalType $18)) '('"col_optional_int8" (OptionalType $19)) '('"col_optional_interval" (OptionalType $20)) '('"col_optional_json" (OptionalType $21)) '('"col_optional_json_document" (OptionalType $22)) '('"col_optional_string" $6) '('"col_optional_timestamp" (OptionalType $23)) '('"col_optional_tz_date" (OptionalType $24)) '('"col_optional_tz_datetime" (OptionalType $25)) '('"col_optional_tz_timestamp" (OptionalType $26)) '('"col_optional_uint16" (OptionalType $27)) '('"col_optional_uint32" (OptionalType $28)) '('"col_optional_uint64" (OptionalType $29)) '('"col_optional_uint8" (OptionalType $30)) '('"col_optional_utf8" (OptionalType $31)) '('"col_optional_uuid" (OptionalType $32)) '('"col_optional_yson" (OptionalType $33)) '('"col_string" $5) '('"col_timestamp" $23) '('"col_tz_date" $24) '('"col_tz_datetime" $25) '('"col_tz_timestamp" $26) '('"col_uint16" $27) '('"col_uint32" $28) '('"col_uint64" $29) '('"col_uint8" $30) '('"col_utf8" $31) '('"col_uuid" $32) '('"col_yson" $33))) (let $35 (DqSourceWrap $9 (DataSource '"generic" '"test_cluster") $34)) (let $36 (ResWrite! world $1 (Key) (FlatMap $35 (lambda '($38) (OptionalIf (Apply $8 (Just (Member $38 '"col_string"))) $38))) '('('type)))) (return (Commit! 
$36 $1)) ) Dq source filter settings: filter_typed { regexp { value { column: "col_string" } pattern { typed_value { type { type_id: STRING } value { bytes_value: "\\\\d+" } } } } } |79.9%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 >> Mvp::OidcWhoami200 [GOOD] >> Mvp::OidcWhoamiServiceAccount200 [GOOD] >> Mvp::OidcWhoamiBadIam200 >> Mvp::OidcWhoamiBadIam200 [GOOD] >> Mvp::OidcWhoamiBadYdb200 [GOOD] >> Mvp::OidcWhoamiBadYdbServiceAccount200 [GOOD] >> Mvp::OidcWhoamiNoInfo500 >> LongTxServicePublicTypes::Snapshot [GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/common/ut/unittest >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] |79.9%| [TS] {RESULT} ydb/core/backup/common/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/pqtablet/blob/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 128 Create chunk: 0.000014s Read by index: 0.000011s Iterate: 0.000008s Size: 252 Create chunk: 0.000022s Read by index: 0.000010s Iterate: 0.000009s Size: 8002 Create chunk: 0.000049s Read by index: 0.000012s Iterate: 0.000013s Size: 8256 Create chunk: 0.000063s Read by index: 0.000016s Iterate: 0.000016s Size: 8532 Create chunk: 0.000026s Read by index: 0.000010s Iterate: 0.000006s Size: 7769 Create chunk: 0.000033s Read by index: 0.000012s Iterate: 0.000011s Size: 2853 Create chunk: 0.000023s Read by index: 0.000028s Iterate: 0.000015s Size: 2419 Create chunk: 0.000029s Read by index: 0.000029s Iterate: 0.000016s Size: 2929 Create chunk: 0.000023s Read by index: 0.000029s Iterate: 0.000016s Size: 2472 Create chunk: 0.000032s Read by index: 0.000034s Iterate: 0.000017s Size: 1887 Create chunk: 0.000017s Read by index: 0.000019s Iterate: 0.000011s Size: 1658 Create chunk: 0.000022s Read by index: 0.000022s Iterate: 0.000012s Size: 1889 Create chunk: 0.000016s Read by index: 0.000017s Iterate: 0.000010s Size: 1660 Create chunk: 0.000021s Read by index: 0.000021s Iterate: 0.000012s Size: 2407 Create chunk: 0.000021s Read by index: 0.000021s Iterate: 0.000016s Size: 2061 Create chunk: 0.000034s Read by index: 0.000026s Iterate: 0.000017s >> ColumnShardConfigValidation::AcceptDefaultCompression [GOOD] >> 
ColumnShardConfigValidation::NotAcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::CorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::CorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::NotCorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::CorrectZSTDCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> Mvp::OidcWhoamiNoInfo500 [GOOD] >> Mvp::OidcWhoamiForward307 [GOOD] >> Mvp::OidcYandexIgnoresWhoamiExtension >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] |79.9%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> Mvp::OidcYandexIgnoresWhoamiExtension [GOOD] >> TTimeGridTest::TimeGrid [GOOD] >> Mvp::GetAddressWithoutPort [GOOD] |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/federated_query/ut/unittest >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] |79.9%| [TS] {RESULT} ydb/core/persqueue/pqtablet/blob/ut/unittest |79.9%| [TM] {RESULT} ydb/core/kqp/federated_query/ut/unittest >> AuditLogHeartbeatTest::LoggingHeartbeat [GOOD] >> AuditLogWriterServiceTest::LoggingTxt [GOOD] >> AuditLogWriterServiceTest::LoggingJson [GOOD] >> AuditLogWriterServiceTest::LoggingJsonLog >> AuditLogWriterServiceTest::LoggingJsonLog [GOOD] >> EscapeNonUtf8LogPartsTest::Escape [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] |79.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dq/service_node/service_node |79.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> TPDiskTest::SmallDisk10Gb |79.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |79.9%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest >> MetaCache::BasicForwarding >> TPDiskTest::SmallDisk10Gb [GOOD] >> TPDiskTest::SuprisinglySmallDisk >> MetaCache::BasicForwarding [GOOD] >> MetaCache::TimeoutFallback [GOOD] >> TYardTest::TestMultiYardLogLatency [GOOD] >> TYardTest::TestMultiYardStartingPoints |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> ColumnShardConfigValidation::NotCorrectZSTDCompression 
[GOOD] |79.9%| [TS] {RESULT} ydb/core/config/validation/column_shard_config_validator_ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/audit/ut/unittest >> EscapeNonUtf8LogPartsTest::Escape [GOOD] Test command err: 2025-09-25T16:14:56.524789Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) 2025-09-25T16:14:57.529819Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) 2025-09-25T16:14:58.541946Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) 2025-09-25T16:14:58.621017Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) 2025-09-25T16:14:58.680997Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) 2025-09-25T16:14:58.708968Z :AUDIT_LOG_WRITER ERROR: audit_log_impl.cpp:80: WriteLog: unable to write audit log (error: test) |79.9%| [TS] {RESULT} ydb/core/audit/ut/unittest |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/metering/ut/unittest >> TTimeGridTest::TimeGrid [GOOD] |79.9%| [TS] {RESULT} ydb/core/metering/ut/unittest >> TPDiskTest::SuprisinglySmallDisk [GOOD] >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart >> FormatCSV::Instants [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common [GOOD] >> FormatCSV::Strings [GOOD] >> FormatCSV::Nulls [GOOD] >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut/unittest >> YamlConfigProto2Yaml::StorageConfig [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 |79.9%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::GetAddressWithoutPort [GOOD] Test command err: 2025-09-25T16:14:56.282082Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.282143Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:56.320491Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.320639Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response 
for protected resource: 200 2025-09-25T16:14:56.358005Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.358125Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-09-25T16:14:56.386791Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.386840Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-09-25T16:14:56.422280Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.422421Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-09-25T16:14:56.465189Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.465239Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-09-25T16:14:56.515901Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.515928Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.515987Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2025-09-25T16:14:56.515993Z :MVP DEBUG: oidc_protected_page.cpp:143: Try to send request to HTTPS port 2025-09-25T16:14:56.515997Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.516013Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:56.528554Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.528577Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.528630Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2025-09-25T16:14:56.612861Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.612888Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.612940Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2025-09-25T16:14:56.626405Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.626432Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.626478Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-09-25T16:14:56.637380Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.637401Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.637443Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-09-25T16:14:56.641454Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.641470Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.641506Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-09-25T16:14:56.643010Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.643023Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.643049Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-09-25T16:14:56.734070Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 
2025-09-25T16:14:56.734680Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2025-09-25T16:14:56.734881Z :MVP DEBUG: oidc_protected_page_nebius.cpp:96: Exchange session token 2025-09-25T16:14:56.735204Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2025-09-25T16:14:56.735227Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.735265Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:56.872236Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 401 2025-09-25T16:14:56.890168Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-09-25T16:14:56.890312Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:56.890375Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:56.898407Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2025-09-25T16:14:56.903540Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.903565Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.903617Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:56.914080Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-09-25T16:14:56.914253Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:56.914315Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:56.916097Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2025-09-25T16:14:56.918292Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-09-25T16:14:56.918311Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:56.918363Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:56.929555Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:56.929617Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-09-25T16:14:56.984316Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:56.984365Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-09-25T16:14:57.055925Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.056024Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:57.072870Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 401 2025-09-25T16:14:57.180089Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.180240Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:57.208778Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2025-09-25T16:14:57.297571Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.297921Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:57.313388Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2025-09-25T16:14:57.359605Z 
:MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.359689Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-09-25T16:14:57.388950Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 412 2025-09-25T16:14:57.429893Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-09-25T16:14:57.435848Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-09-25T16:14:57.445928Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-09-25T16:14:57.478497Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:57.478545Z :MVP DEBUG: extension.cpp:14: Can not process request to protected resource: GET /ydb.viewer.page/counters HTTP/1.1 Host: oidcproxy.net Authorization: 2025-09-25T16:14:57.516420Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.516465Z :MVP DEBUG: oidc_session_create.cpp:43: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2025-09-25T16:14:57.561265Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-09-25T16:14:57.561313Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-09-25T16:14:57.680956Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2025-09-25T16:14:57.680988Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-09-25T16:14:57.680995Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:49: Request impersonated token 2025-09-25T16:14:57.681084Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:100: Incoming response from authorization server: 200 2025-09-25T16:14:57.681103Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:89: Set impersonated cookie: (__Host_impersonated_cookie_636C69656E745F6964: aW1w****bg== (B126DD61)) 2025-09-25T16:14:57.754401Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2025-09-25T16:14:57.754435Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-09-25T16:14:57.812061Z :MVP DEBUG: oidc_cleanup_page.cpp:20: Clear cookie: (__Host_impersonated_cookie_636C69656E745F6964) 2025-09-25T16:14:57.883834Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2025-09-25T16:14:57.883859Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-09-25T16:14:57.883864Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-09-25T16:14:57.883869Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2025-09-25T16:14:57.883953Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2025-09-25T16:14:57.883967Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:57.883990Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:57.905003Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2025-09-25T16:14:57.905031Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-09-25T16:14:57.905037Z :MVP DEBUG: openid_connect.cpp:260: Using cookie 
(__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-09-25T16:14:57.905042Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2025-09-25T16:14:57.905093Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 401 OK 2025-09-25T16:14:57.905097Z :MVP DEBUG: oidc_protected_page_nebius.cpp:65: Getting access token: {"error": "bad_token"} 2025-09-25T16:14:57.905102Z :MVP DEBUG: oidc_protected_page_nebius.cpp:121: Clear impersonated cookie (__Host_impersonated_cookie_636C69656E745F6964) and retry 2025-09-25T16:14:57.998686Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:57.998823Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2025-09-25T16:14:57.998844Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-09-25T16:14:57.998875Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-09-25T16:14:57.998890Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2025-09-25T16:14:57.998896Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2025-09-25T16:14:58.027374Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.027428Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2025-09-25T16:14:58.027447Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-09-25T16:14:58.027462Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-09-25T16:14:58.027470Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2025-09-25T16:14:58.027476Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2025-09-25T16:14:58.032864Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.033877Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:58.035366Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-09-25T16:14:58.100435Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.101521Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-09-25T16:14:58.121343Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-09-25T16:14:58.180543Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.185563Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2025-09-25T16:14:58.187223Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2025-09-25T16:14:58.260132Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.261211Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2025-09-25T16:14:58.264843Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-09-25T16:14:58.294248Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.295064Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2025-09-25T16:14:58.298532Z :MVP 
DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-09-25T16:14:58.322375Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.323272Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2025-09-25T16:14:58.332874Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2025-09-25T16:14:58.347940Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.348766Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2025-09-25T16:14:58.439660Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-09-25T16:14:58.439790Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> TBatchedVecTest::TestOutputTOutputType [GOOD] >> BufferWithGaps::Basic [GOOD] >> PtrTest::Test1 [GOOD] >> BufferWithGaps::IsReadable [GOOD] >> TBatchedVecTest::TestToStringInt [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 0 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 290767 us testing erasure mirror-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 512 cases, took 44 us testing erasure block-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 497 us testing erasure mirror-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 64 cases, took 11 us testing erasure block-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 15596 us testing erasure stripe-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 442 us ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] Test command err: 2025-09-25T16:14:59.069414Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:65100 2025-09-25T16:14:59.069521Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:7504 2025-09-25T16:14:59.069579Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:14:2061] 2025-09-25T16:14:59.069588Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 
127.0.0.1:65100 2025-09-25T16:14:59.069730Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:65100) connecting... 2025-09-25T16:14:59.069893Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:65100) outgoing connection opened 2025-09-25T16:14:59.069902Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:65100) <- (GET /server) 2025-09-25T16:14:59.070031Z :HTTP DEBUG: http_proxy_incoming.cpp:83: (#12,[::ffff:127.0.0.1]:51850) incoming connection opened 2025-09-25T16:14:59.070070Z :HTTP DEBUG: http_proxy_incoming.cpp:156: (#12,[::ffff:127.0.0.1]:51850) -> (GET /server) 2025-09-25T16:14:59.070188Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:7504 with deadline 2025-09-25T16:15:59.070179Z 2025-09-25T16:14:59.070198Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2025-09-25T16:15:59.070179Z (+1758816959.070179s) 2025-09-25T16:14:59.070210Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:7504 timeout 30.000000s 2025-09-25T16:14:59.070235Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:16:2063] 2025-09-25T16:14:59.070242Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:7504 2025-09-25T16:14:59.070281Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:7504) connecting... 2025-09-25T16:14:59.070343Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:7504) outgoing connection opened 2025-09-25T16:14:59.070349Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:7504) <- (GET /server) 2025-09-25T16:14:59.070449Z :HTTP DEBUG: http_proxy_incoming.cpp:83: (#14,[::ffff:127.0.0.1]:36664) incoming connection opened 2025-09-25T16:14:59.070469Z :HTTP DEBUG: http_proxy_incoming.cpp:156: (#14,[::ffff:127.0.0.1]:36664) -> (GET /server) 2025-09-25T16:14:59.070540Z :HTTP DEBUG: http_proxy_incoming.cpp:280: (#14,[::ffff:127.0.0.1]:36664) <- (200 Found, 6 bytes) 2025-09-25T16:14:59.070570Z :HTTP DEBUG: http_proxy_incoming.cpp:340: (#14,[::ffff:127.0.0.1]:36664) connection closed 2025-09-25T16:14:59.070649Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#13,127.0.0.1:7504) -> (200 Found, 6 bytes) 2025-09-25T16:14:59.070662Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#13,127.0.0.1:7504) connection closed 2025-09-25T16:14:59.070727Z :HTTP DEBUG: meta_cache.cpp:146: Cache received successfull (200) response for /server 2025-09-25T16:14:59.070781Z :HTTP DEBUG: http_proxy_incoming.cpp:280: (#12,[::ffff:127.0.0.1]:51850) <- (200 Found, 6 bytes) 2025-09-25T16:14:59.070808Z :HTTP DEBUG: http_proxy_incoming.cpp:340: (#12,[::ffff:127.0.0.1]:51850) connection closed 2025-09-25T16:14:59.070870Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:65100) -> (200 Found, 6 bytes) 2025-09-25T16:14:59.070877Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:65100) connection closed 2025-09-25T16:14:59.070925Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:16:2063] 2025-09-25T16:14:59.070953Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:14:2061] 2025-09-25T16:14:59.075896Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:15673 2025-09-25T16:14:59.075980Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:30297 2025-09-25T16:14:59.076037Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:14:2061] 2025-09-25T16:14:59.076045Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:15673 2025-09-25T16:14:59.076086Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:15673) connecting... 
2025-09-25T16:14:59.076210Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:15673) outgoing connection opened 2025-09-25T16:14:59.076219Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:15673) <- (GET /server) 2025-09-25T16:14:59.089003Z :HTTP DEBUG: http_proxy_incoming.cpp:83: (#12,[::ffff:127.0.0.1]:45390) incoming connection opened 2025-09-25T16:14:59.089076Z :HTTP DEBUG: http_proxy_incoming.cpp:156: (#12,[::ffff:127.0.0.1]:45390) -> (GET /server) 2025-09-25T16:14:59.089121Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:30297 with deadline 2025-09-25T16:24:59.089106Z 2025-09-25T16:14:59.089129Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2025-09-25T16:24:59.089106Z (+1758817499.089106s) 2025-09-25T16:14:59.089139Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:30297 timeout 30.000000s 2025-09-25T16:14:59.089168Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:16:2063] 2025-09-25T16:14:59.089179Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:30297 2025-09-25T16:14:59.089231Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:30297) connecting... 2025-09-25T16:14:59.089384Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:30297) outgoing connection opened 2025-09-25T16:14:59.089393Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:30297) <- (GET /server) 2025-09-25T16:14:59.089442Z :HTTP ERROR: http_proxy_outgoing.cpp:124: (#13,127.0.0.1:30297) connection closed with error: Connection timed out 2025-09-25T16:14:59.089547Z :HTTP WARN: meta_cache.cpp:151: Cache received failed response with error "Connection timed out" for /server - retrying locally 2025-09-25T16:14:59.089628Z :HTTP DEBUG: http_proxy_incoming.cpp:83: (#14,[::ffff:127.0.0.1]:52616) incoming connection opened 2025-09-25T16:14:59.089647Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:16:2063] 2025-09-25T16:14:59.089666Z :HTTP DEBUG: http_proxy_incoming.cpp:156: (#14,[::ffff:127.0.0.1]:52616) -> (GET /server) 2025-09-25T16:14:59.089676Z :HTTP DEBUG: http_proxy_incoming.cpp:190: (#14,[::ffff:127.0.0.1]:52616) connection closed 2025-09-25T16:14:59.100989Z :HTTP DEBUG: http_proxy_incoming.cpp:280: (#12,[::ffff:127.0.0.1]:45390) <- (200 Found, 6 bytes) 2025-09-25T16:14:59.101122Z :HTTP DEBUG: http_proxy_incoming.cpp:340: (#12,[::ffff:127.0.0.1]:45390) connection closed 2025-09-25T16:14:59.101228Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:15673) -> (200 Found, 6 bytes) 2025-09-25T16:14:59.101241Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:15673) connection closed 2025-09-25T16:14:59.101395Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:14:2061] |79.9%| [TS] {RESULT} ydb/mvp/meta/ut/unittest |79.9%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/io_formats/arrow/scheme/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ >> TYardTest::TestMultiYardStartingPoints [GOOD] >> TYardTest::TestMultiYardLogMultipleWriteRead |79.9%| 
[TS] {RESULT} ydb/core/io_formats/arrow/scheme/ut/unittest >> Init::TWithDefaultParser [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] >> TMemoryPoolTest::AllocOneByte [GOOD] >> TMemoryPoolTest::AppendString [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> UtilString::ShrinkToFit [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorWithAnotherLabel [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorInheritance [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeId [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeHost [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind [GOOD] >> ArrowTest::BatchBuilder >> ArrowTest::BatchBuilder [GOOD] |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::BinaryData [GOOD] |79.9%| [TS] {RESULT} ydb/core/log_backend/ut/unittest |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/base/ut/gtest >> TBatchedVecTest::TestToStringInt [GOOD] |79.9%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ArrowTest::MaxVersionFilter [GOOD] >> ArrowTest::EqualKeysVersionFilter [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> ColumnFilter::ApplyFilterToFilter [GOOD] >> ColumnFilter::FilterSlice [GOOD] >> ColumnFilter::FilterCheckSlice [GOOD] >> ColumnFilter::FilterSlice1 [GOOD] >> ColumnFilter::CutFilter1 [GOOD] >> ColumnFilter::CutFilter2 [GOOD] >> Dictionary::Simple |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_util/unittest >> UtilString::ShrinkToFit [GOOD] |79.9%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest |79.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/init/ut/unittest >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind [GOOD] |79.9%| [TS] {RESULT} ydb/core/config/init/ut/unittest |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD] >> TPDiskTest::PDiskSlotSizeInUnits |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TYardTest::TestSysLogOverwrite |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a 
|80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp >> TYardTest::TestSysLogOverwrite [GOOD] >> TYardTest::TestUpsAndDownsAtTheBoundary >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TYardTest::TestChunkContinuity2 >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe >> TYardTest::TestChunkContinuity2 [GOOD] >> TYardTest::TestChunkContinuity3000 |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points >> GivenIdRange::Points [GOOD] >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp >> Dictionary::Simple [GOOD] >> GivenIdRange::Allocate [GOOD] >> Dictionary::ComparePayloadAndFull |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadWhole >> TYardTest::TestChunkContinuity3000 [GOOD] >> TYardTest::TestChunkContinuity9000 >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 2025-09-25T16:14:57.042210Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-09-25T16:14:57.042313Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2025-09-25T16:14:57.096507Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-09-25T16:14:57.096589Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2025-09-25T16:15:02.096740Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-09-25T16:15:02.096856Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token |80.0%| [TS] {RESULT} ydb/mvp/core/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |80.0%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] >> Dictionary::ComparePayloadAndFull [GOOD] >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add [GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::TestValueFromNull [GOOD] >> ProgramStep::MergeFilterSimple [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull |80.0%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] >> SortableBatchPosition::FindPosition [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithOneSearchPoint [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] >> TYardTest::TestChunkContinuity9000 [GOOD] >> TYardTest::TestChunkLock |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestChunkUnlock >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/ut/unittest >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 
NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007324840764 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04506846012 0.3776978417 zstd(poolsize=1024;keylen=16) 0.0655640205 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1110720087 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1914108287 0.7447345433 zstd(poolsize=128;keylen=1) 0.003769847609 0.04002713704 zstd(poolsize=128;keylen=10) 0.007456731695 0.07809798271 zstd(poolsize=128;keylen=16) 0.0102539786 0.1029455519 zstd(poolsize=128;keylen=32) 0.01677217062 0.1578947368 zstd(poolsize=128;keylen=64) 0.03005940945 0.2517949988 zstd(poolsize=16;keylen=1) 0.002620896858 0.02794819359 zstd(poolsize=16;keylen=10) 
0.002816201441 0.03048416019 zstd(poolsize=16;keylen=16) 0.003368308096 0.03570300158 zstd(poolsize=16;keylen=32) 0.004159808469 0.0434375 zstd(poolsize=16;keylen=64) 0.005779996974 0.05875115349 zstd(poolsize=1;keylen=1) 0.002461243407 0.02626193724 zstd(poolsize=1;keylen=10) 0.002154636612 0.0234375 zstd(poolsize=1;keylen=16) 0.002356872222 0.02519132653 zstd(poolsize=1;keylen=32) 0.002427911996 0.02573879886 zstd(poolsize=1;keylen=64) 0.00258021431 0.02699269609 zstd(poolsize=512;keylen=1) 0.005583027596 0.05848930481 zstd(poolsize=512;keylen=10) 0.0236929438 0.2237078941 zstd(poolsize=512;keylen=16) 0.03443366072 0.2936507937 zstd(poolsize=512;keylen=32) 0.05917328099 0.4212765957 zstd(poolsize=512;keylen=64) 0.1058929843 0.5749553837 zstd(poolsize=64;keylen=1) 0.00319560285 0.03401360544 zstd(poolsize=64;keylen=10) 0.004852093844 0.05176470588 zstd(poolsize=64;keylen=16) 0.00633344236 0.06557881773 zstd(poolsize=64;keylen=32) 0.009647738439 0.09619952494 zstd(poolsize=64;keylen=64) 0.01626771323 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; - ... 
"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Min):[1]}\",\"{4(Min):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Max):[1]}\",\"{4(Max):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Some):[1]}\",\"{4(Some):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, 
label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":1,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1}; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"4444"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"4444"}],"fields":["id1: int32 not null"]},"reverse":true,"position":3,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"3333"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":2,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; |80.0%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkReserve >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> TYardTest::TestChunkWrite20Read02 |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkUnlockRestart >> TYardTest::TestChunkReserve [GOOD] >> TYardTest::TestCheckSpace |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |80.0%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> 
TYardTest::TestChunkUnlockRestart [GOOD] >> TYardTest::TestHttpInfo >> TPDiskTest::PDiskSlotSizeInUnits [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestBootingState |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> TPDiskTest::PlainChunksWriteReadALot >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep >> TYardTest::TestFirstRecordToKeep [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep >> TYardTest::TestRedZoneSurvivability [GOOD] >> TYardTest::TestSlay >> TYardTest::TestSlay [GOOD] >> TYardTest::TestSlayRace >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates >> TYardTest::TestSlayRace [GOOD] >> TYardTest::TestSlayRecreate >> CompressionTest::lz4_generator_deflates [GOOD] >> StLog::Basic [GOOD] >> TYardTest::TestSlayRecreate [GOOD] >> TYardTest::TestSlayLogWriteRaceActor >> TYardTest::TestBootingState [GOOD] >> TYardTest::Test3AsyncLog >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] >> TYardTest::TestDamageAtTheBoundary ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.5014456745 seconds Producer 1 worked for 0.4230605765 seconds Consumer 0 worked for 3.118681168 seconds Consumer 1 worked for 2.573797505 seconds Consumer 2 worked for 3.402680838 seconds Consumer 3 worked for 2.848090303 seconds >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestMultiYardHarakiri |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp >> TYardTest::TestLogWriteCutEqual [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::TestChunkRecommit >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TYardTest::TestChunkRecommit [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune >> TYardTest::TestChunkRestartRecommit |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |80.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestChunkDelete |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |80.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp 
|80.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |80.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a >> TYardTest::TestMultiYardHarakiri [GOOD] >> TYardTest::TestStartingPointReboots >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> TCowBTreeTest::MultipleSnapshots [GOOD] |80.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] >> TCowBTreeTest::MultipleSnapshotsWithGc >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |80.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp >> TYardTest::TestChunkForget [GOOD] >> TYardTest::Test3HugeAsyncLog |80.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TYardTest::TestDestroySystem |80.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |80.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |80.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |80.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] >> TYardTest::TestDestroySystem [GOOD] >> TYardTest::TestCutMultipleLogChunks >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestDestructionWhileWritingChunk >> TYardTest::Test3HugeAsyncLog [GOOD] >> TYardTest::TestChunkFlushReboot >> TYardTest::TestDestructionWhileWritingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingChunk >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingLog >> TYardTest::TestChunkFlushReboot [GOOD] >> TYardTest::TestAllocateAllChunks >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TYardTest::TestFormatInfo >> TYardTest::TestAllocateAllChunks [GOOD] >> TYardTest::TestChunkDeletionWhileWriting >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk >> TYardTest::TestChunkDeletionWhileWriting [GOOD] >> TYardTest::TestChunkPriorityBlock >> TYardTest::TestChunkPriorityBlock [GOOD] |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] Test command err: 2025-09-25T16:14:45.395034Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.425593Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:427} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5343189631624177263 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 1658880 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-09-25T16:14:45.690725Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.736955Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 15768980511106824780 MagicNextLogChunkReference: 10383176717780885897 MagicLogChunk: 69370840730571370 MagicDataChunk: 5705568263137354417 MagicSysLogChunk: 17411079332369053503 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885577991 (2025-09-25T16:14:45.577991Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.749007Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:45.760912Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:45.761161Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:45.765217Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:45.785495Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1328411 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:45.962652Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.988953Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 17495788853610403889 MagicNextLogChunkReference: 2413283702860794045 MagicLogChunk: 15750219316411115074 MagicDataChunk: 18008796987917544203 MagicSysLogChunk: 11719100348268484715 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885831263 (2025-09-25T16:14:45.831263Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.997263Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:46.001108Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:46.001141Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.005125Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.060992Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1256669 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:56.190475Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:56.228945Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 5785202565030905599 MagicNextLogChunkReference: 7031203695872140087 MagicLogChunk: 18217853484687889703 MagicDataChunk: 11220118787839574643 MagicSysLogChunk: 7661795664895687820 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816896076659 (2025-09-25T16:14:56.076659Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:56.236937Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read 
Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:56.253431Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:56.253471Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:56.272867Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:56.285540Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1237269 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:58.453667Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:58.476949Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 13042525251333509144 MagicNextLogChunkReference: 11542768871153323477 MagicLogChunk: 8588741971298242043 MagicDataChunk: 14679583758455184383 MagicSysLogChunk: 16594561266912794908 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816898309302 (2025-09-25T16:14:58.309302Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:58.488913Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:58.504916Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:58.504953Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:58.528868Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:58.552992Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1969298 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 
2025-09-25T16:15:02.174577Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:02.200910Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 13987588282320957747 MagicNextLogChunkReference: 6265714215175310272 MagicLogChunk: 14233977347878715932 MagicDataChunk: 14610669319935209580 MagicSysLogChunk: 10140991106740240288 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816901955047 (2025-09-25T16:15:01.955047Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:02.220650Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 ... OfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 2025-09-25T16:15:12.262572Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:12.292951Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5092545462740431702 MagicNextLogChunkReference: 14991119575438245353 MagicLogChunk: 5741421448832113130 MagicDataChunk: 1112632930721430832 MagicSysLogChunk: 14764837216739862905 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816911954575 (2025-09-25T16:15:11.954575Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:12.316904Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:12.336924Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:12.336959Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:12.345174Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:12.356917Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1518758 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:15:12.450492Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 
2025-09-25T16:15:12.476911Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5092545462740431702 MagicNextLogChunkReference: 14991119575438245353 MagicLogChunk: 5741421448832113130 MagicDataChunk: 1112632930721430832 MagicSysLogChunk: 14764837216739862905 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816911954575 (2025-09-25T16:15:11.954575Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:12.500919Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1998993 NonceLog# 1518758 NonceData# 1464085} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:12.520882Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:12.520925Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2025-09-25T16:15:12.520945Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:12.529072Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:12.544943Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:12.552848Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:988} SendChunkReadErrorPDiskId# 1 chunk owned by the system for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560002094 PDiskId# 1 2025-09-25T16:15:12.918761Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:12.948957Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 158352451594880867 MagicNextLogChunkReference: 456064389833140527 MagicLogChunk: 11204630531226892104 MagicDataChunk: 14916581121590137545 MagicSysLogChunk: 2592483249558699186 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816912631627 (2025-09-25T16:15:12.631627Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | 
EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:12.972904Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:12.996896Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:12.996932Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:13.012920Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:13.028970Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1052767 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:15:13.382625Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:13.407771Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 10071955103162214697 MagicNextLogChunkReference: 17320508327230227341 MagicLogChunk: 12844544599979051073 MagicDataChunk: 9146700460051743812 MagicSysLogChunk: 17099634394707407156 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913160812 (2025-09-25T16:15:13.160812Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:13.424905Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:13.436908Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:13.436941Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:13.441105Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:13.474494Z :BS_PDISK NOTICE: 
{BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1584130 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:15:13.818778Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:13.844916Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16937185810345155295 MagicNextLogChunkReference: 12505854795355218470 MagicLogChunk: 8137363878384018960 MagicDataChunk: 15599068977469721677 MagicSysLogChunk: 17953284211476506064 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913556229 (2025-09-25T16:15:13.556229Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:13.868932Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:13.888926Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:13.888970Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:13.893166Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:13.908966Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1181856 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |80.1%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test >> TYardTest::TestStartingPointReboots [GOOD] >> TYardTest::TestRestartAtNonceJump |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |80.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |80.1%| [AR] {RESULT} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp >> TYardTest::TestRestartAtNonceJump 
[GOOD] >> TYardTest::TestRestartAtChunkEnd >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear >> TYardTest::TestRestartAtChunkEnd [GOOD] >> TYardTestRestore::TestRestore15 >> TYardTestRestore::TestRestore15 [GOOD] >> TPDiskTest::PlainChunksWriteReadALot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD] Test command err: 2025-09-25T16:14:45.599684Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.624940Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13957957302311693002 MagicNextLogChunkReference: 12425880042063182216 MagicLogChunk: 17490086404262037521 MagicDataChunk: 4900274844719772285 MagicSysLogChunk: 5366471593363597328 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885451422 (2025-09-25T16:14:45.451422Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.641458Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:45.661018Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:45.661282Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:45.669318Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:45.673131Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1706198 CutLogId# [1:7554061004692116859:2050] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:45.692960Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 (PDisk.LogWrite) (PDisk.LogWrite) (PDisk.ChunkRead -> [(PDisk.CompletionChunkRead) , (PDisk.ChunkRead.CompletionPart) , (PDisk.ChunkRead.CompletionPart) , (PDisk.ChunkRead.CompletionPart)]) (PDisk.ChunkWrite -> [(PDisk.CompletionChunkWrite) , (PDisk.ChunkWritePiece) , (PDisk.ChunkWritePiece) , (PDisk.ChunkWritePiece)]) (PDisk.LogRead) 2025-09-25T16:14:45.904561Z :BS_PDISK NOTICE: 
{BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.920960Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 10285159287866902690 MagicNextLogChunkReference: 895048015135289104 MagicLogChunk: 12871172982420561091 MagicDataChunk: 13867488737832750198 MagicSysLogChunk: 16380688148345297952 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885803167 (2025-09-25T16:14:45.803167Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.932998Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:45.938882Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:45.938923Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:45.948877Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.004974Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1072881 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2025-09-25T16:14:46.009032Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.102608Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.136894Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-09-25T16:14:46.145054Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:436} Magic sector is present on disk, now going to format device PDiskId# 1 2025-09-25T16:14:46.309661Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.309689Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:374} Device formatting done PDiskId# 1 2025-09-25T16:14:46.327081Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 8613658877066922093 MagicLogChunk: 5674156057114228751 MagicDataChunk: 6943655999413047845 
MagicSysLogChunk: 5472683380485159168 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816886204481 (2025-09-25T16:14:46.204481Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.337358Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:46.342212Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:46.342249Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.343632Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.349063Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1848530 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.349175Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1848530 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.349461Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1848530 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.349531Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1848530 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.349735Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1848530 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.739590Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.770124Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 8613658877066922093 MagicLogChunk: 5674156057114228751 MagicDataChunk: 6943655999413047845 MagicSysLogChunk: 5472683380485159168 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816886204481 (2025-09-25T16:14:46.204481Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.784986Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 2086975 NonceLog# 1854549 NonceData# 1515103} LogHeadChunkIdx# 106 LogHeadChunkPreviousNonce# 1851078 Owner[3]# [0:4294967295:0:0:0] Owner[4]# [1:4294967295:0:0:0] Owner[5]# [2:4294967295:0:0:0] Owner[6]# [3:4294967295:0:0:0] Owner[7]# [4:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:46.842746Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 112 SectorIdx# 411 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 112 OffsetInChunk# 1683456} PDiskId# 1 2025-09-25T16:14:46.842783Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 112 OffsetInChunk# 1683456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.843080Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.848499Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [1:4294967295:0:0:0] OwnerId# 4 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:46.848542Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:46.848565Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Register ... 
:0:0:0] ChunkIdx# 3 SectorIdx# 187 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 3 LogEndSectorIdx# 187 PDiskId# 1 2025-09-25T16:15:19.025369Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2025-09-25T16:15:19.115128Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:19.133009Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 13266214832490057246 MagicNextLogChunkReference: 3639031913167495111 MagicLogChunk: 4292574229062545771 MagicDataChunk: 4222158155172863685 MagicSysLogChunk: 12332754691846439549 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816919076643 (2025-09-25T16:15:19.076643Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:19.152927Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:19.164923Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:19.164964Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:19.169208Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:19.221041Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1845393 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:15:19.221332Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:15:19.663050Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:19.663432Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 13266214832490057246 MagicNextLogChunkReference: 3639031913167495111 MagicLogChunk: 4292574229062545771 
MagicDataChunk: 4222158155172863685 MagicSysLogChunk: 12332754691846439549 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816919076643 (2025-09-25T16:15:19.076643Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:19.664871Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1365449 NonceLog# 1845393 NonceData# 1392331} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:19.666052Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:19.667765Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:19.667794Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:19.668316Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:19.765092Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:19.768993Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 508} PDiskId# 1 2025-09-25T16:15:19.846652Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:19.848907Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 13266214832490057246 MagicNextLogChunkReference: 3639031913167495111 MagicLogChunk: 4292574229062545771 MagicDataChunk: 4222158155172863685 MagicSysLogChunk: 12332754691846439549 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816919076643 (2025-09-25T16:15:19.076643Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:19.852927Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord 
Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3345648 NonceLog# 3264175 NonceData# 2904744} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:19.854181Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:19.856157Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 1845901 " with nonceJumpLogPageHeader2->PreviousNonce# "# 1845901 PDiskId# 1 2025-09-25T16:15:19.861419Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 3 SectorIdx# 186 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 3 OffsetInChunk# 761856} PDiskId# 1 2025-09-25T16:15:19.861464Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 3 OffsetInChunk# 761856} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:19.872855Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:19.936988Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:19.957792Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:837} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 3 SectorIdx# 187 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 3 LogEndSectorIdx# 187 PDiskId# 1 2025-09-25T16:15:19.957828Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2025-09-25T16:15:20.090008Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:20.108986Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 13024461254156497676 MagicNextLogChunkReference: 13902686407097524114 MagicLogChunk: 4654787269757922536 MagicDataChunk: 17867357858812531786 MagicSysLogChunk: 1075773978227411405 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816920042396 (2025-09-25T16:15:20.042396Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:20.114462Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:20.120916Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:20.120953Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:20.121473Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:20.189053Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1095499 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |80.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::PlainChunksWriteReadALot [GOOD] Test command err: 2025-09-25T16:14:43.627889Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.640971Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 8342059832273139446 MagicNextLogChunkReference: 15749270719805282608 MagicLogChunk: 14302894951214246252 MagicDataChunk: 10281115837601890505 MagicSysLogChunk: 4597246051428643764 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883554523 (2025-09-25T16:14:43.554523Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.652951Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:43.656910Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:43.657129Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop 
| DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.665130Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:43.668243Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1093454 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:43.783098Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.799185Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 7667806854459399161 MagicNextLogChunkReference: 14661245055151090542 MagicLogChunk: 9184467618038531167 MagicDataChunk: 765756144470722164 MagicSysLogChunk: 8494580511523217698 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883735145 (2025-09-25T16:14:43.735145Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.816241Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:43.832932Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:43.832964Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.841035Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:43.845092Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1265777 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:44.026598Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.064919Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 17736632806860428128 MagicNextLogChunkReference: 11476757940884356805 MagicLogChunk: 3470804320080233686 MagicDataChunk: 14722808784844055773 MagicSysLogChunk: 2790380192414762327 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883932891 
(2025-09-25T16:14:43.932891Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.072920Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:44.080918Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:44.080946Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.092963Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.097119Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1488010 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:44.116915Z node 3 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:969} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2025-09-25T16:14:44.264563Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.289635Z node 4 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12813777480236545321 MagicNextLogChunkReference: 12469907457491917721 MagicLogChunk: 13721546580470394062 MagicDataChunk: 4583776955314223831 MagicSysLogChunk: 13104504432105667907 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816884168598 (2025-09-25T16:14:44.168598Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.304905Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:44.312922Z node 4 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:44.312956Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} 
isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.313410Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.324959Z node 4 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1977167 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:44.344915Z node 4 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:969} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_RESERVED_DELETE_ON_QUARANTINE ownerId# 3 PDiskId# 1 2025-09-25T16:14:44.514479Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.532936Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 6742152114575625905 MagicNextLogChunkReference: 12568727947151092564 MagicLogChunk: 6206474327617301888 MagicDataChunk: 15623928965753812116 MagicSysLogChunk: 16418671510389208482 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816884454288 (2025-09-25T16:14:44.454288Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.541266Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:44.552896Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:44.552930Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.555870Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.565147Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] F ... 295} isEndOfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 ... Awaiting EvPDiskStateUpdate SlotSizeInUnits# 0 NumActiveSlots# 5 Got EvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1 Path: "" AvailableSize: 133452267520 TotalSize: 134217728000 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 36805017600 EnforcedDynamicSlotSize: 19220398080 NumActiveSlots: 5 SlotSizeInUnits: 0 - State 2 ... 
Awaiting EvPDiskStateUpdate SlotSizeInUnits# 0 NumActiveSlots# 6 Got EvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1 Device: Green Realtime: Green Got EvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1 Path: "" AvailableSize: 133452267520 TotalSize: 134217728000 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 36805017600 EnforcedDynamicSlotSize: 16085155840 NumActiveSlots: 6 SlotSizeInUnits: 0 2025-09-25T16:15:04.430000Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [11:_:0:0:0] GroupSizeInUnits: 2 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1359170 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 4}} Owned chunkIds: {2}}{OwnerId: 4 VDiskId: [12:_:0:0:0] GroupSizeInUnits: 4 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1359171 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {3}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {4..982} PDiskId# 1 2025-09-25T16:15:04.447202Z node 34 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:04.453055Z node 34 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1174} HandlePoison, PDiskThread stopped PDiskId# 1 2025-09-25T16:15:04.460915Z node 34 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4611866324516987105 MagicNextLogChunkReference: 17493010330812072438 MagicLogChunk: 12061370114458278955 MagicDataChunk: 9329873996836119496 MagicSysLogChunk: 1549616774025999761 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816900367744 (2025-09-25T16:15:00.367744Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:04.469025Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3121062 NonceLog# 2464903 NonceData# 3357100} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [11:4294967295:0:0:0] Owner[4]# [12:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:04.472916Z node 34 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:04.472962Z node 34 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 10 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 40960} PDiskId# 1 2025-09-25T16:15:04.472983Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 40960} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | 
DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:04.473309Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:04.481005Z node 34 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 5 vDiskId# [13:_:0:0:0] FirstNonceToKeep# 3858513 CutLogId# [34:7554061066617044525:2050] ownerRound# 106 PDiskId# 1 2025-09-25T16:15:04.488924Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 5 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 - State 3 ... Awaiting EvPDiskStateUpdate SlotSizeInUnits# 2 NumActiveSlots# 4 Got EvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1 Path: "" AvailableSize: 133452267520 TotalSize: 134217728000 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 36805017600 EnforcedDynamicSlotSize: 24127733760 NumActiveSlots: 4 SlotSizeInUnits: 2 2025-09-25T16:15:05.479040Z node 35 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:05.479225Z node 35 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5046997668739761615 MagicNextLogChunkReference: 2068195464616456242 MagicLogChunk: 10862368025188713562 MagicDataChunk: 4371498846309344147 MagicSysLogChunk: 15583038868091487271 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816905471271 (2025-09-25T16:15:05.471271Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:05.480295Z node 35 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:05.480953Z node 35 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:05.480980Z node 35 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:05.481277Z node 35 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:05.481380Z node 35 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [14:_:0:0:0] FirstNonceToKeep# 1377019 
CutLogId# [35:7554061088469191880:2050] ownerRound# 107 PDiskId# 1 2025-09-25T16:15:05.481625Z node 35 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:15:05.481873Z node 35 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 4 vDiskId# [15:_:0:0:0] FirstNonceToKeep# 1377020 CutLogId# [35:7554061088469191880:2050] ownerRound# 108 PDiskId# 1 2025-09-25T16:15:05.482046Z node 35 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-09-25T16:15:05.482874Z node 35 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:969} PDiskId# 1 Can't write chunkIdx# 3 chunk is owner by another owner. chunk's owner# 4 request's owner# 3 PDiskId# 1 2025-09-25T16:15:05.482935Z node 35 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:969} PDiskId# 1 Can't write chunkIdx# 2 chunk is owner by another owner. chunk's owner# 3 request's owner# 4 PDiskId# 1 2025-09-25T16:15:05.534650Z node 36 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:05.534666Z node 36 :BS_PDISK WARN: {BPD92@blobstorage_pdisk_impl.cpp:2968} PDisk's PlainDataChunks parameters mismatch, flag from Format will be used Format.IsPlainDataChunks()# false Cfg->PlainDataChunks# true PDiskId# 1 2025-09-25T16:15:05.540971Z node 36 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 6893569969187083412 MagicNextLogChunkReference: 4476464151593210124 MagicLogChunk: 7732242647206784380 MagicDataChunk: 7190315011187341297 MagicSysLogChunk: 1352078665378460183 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816905499598 (2025-09-25T16:15:05.499598Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData | FormatFlagPlainDataChunks}} PDiskId# 1 2025-09-25T16:15:05.542236Z node 36 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:15:05.544218Z node 36 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:15:05.544244Z node 36 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | 
DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:05.544606Z node 36 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:05.544712Z node 36 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [16:_:0:0:0] FirstNonceToKeep# 1298938 CutLogId# [36:7554061088026475990:2050] ownerRound# 109 PDiskId# 1 2025-09-25T16:15:05.545204Z node 36 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 seed# 1758816905549600 total_speed# 1.735882615 GB/s |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |80.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.1%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut >> GroupStress::Test [GOOD] |80.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> ParseStats::ParseWithSources [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> FormatTimes::DurationMs [GOOD] >> Config::ExcludeScope [GOOD] >> 
TIncrHugeBlobIdDict::Basic [GOOD]
>> FormatTimes::DurationUs [GOOD]
>> TBsVDiskGC::TGCManyVPutsDelTabletTest
>> Config::IncludeScope [GOOD]
>> StatsFormat::AggregateStat [GOOD]
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
>> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD]
|80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> TIncrHugeBasicTest::Recovery [GOOD]
>> TIncrHugeBasicTest::Defrag
>> StatsFormat::FullStat [GOOD]
>> FormatTimes::ParseDuration [GOOD]
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
|80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest
>> ParseStats::ParseMultipleGraphsV2 [GOOD]
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
|80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
|80.1%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest
>> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh
>> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh
>> TBsVDiskExtreme::Simple3Put3GetFresh
>> TBsVDiskExtremeHuge::Simple3Put3GetFresh
|80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
|80.1%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
>> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD]
|80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> Config::ExcludeScope [GOOD]
|80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> FormatTimes::DurationMs [GOOD]
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
>> TIncrHugeBlobIdDict::Basic [GOOD]
>> TBsVDiskExtreme::SimpleGetFromEmptyDB
>> TBsLocalRecovery::WriteRestartReadHuge
>> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh
>> TBsVDiskRepl3::SyncLogTest
>> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD]
>> TBsVDiskOutOfSpace::WriteUntilYellowZone
|80.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
|80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
|80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> FormatTimes::DurationUs [GOOD]
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest
>> TIncrHugeBasicTest::Recovery [GOOD]
|80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> Config::IncludeScope [GOOD]
>> TBsVDiskGC::GCPutKeepIntoEmptyDB
>> TBsVDiskManyPutGet::ManyPutGetWaitCompaction
>> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh
>> TBsVDiskBadBlobId::PutBlobWithBadId
>> TBsVDiskRepl1::ReplProxyKeepBits
|80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> StatsFormat::FullStat [GOOD]
|80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> StatsFormat::AggregateStat [GOOD]
>> TBsLocalRecovery::StartStopNotEmptyDB
>> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh
|80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest
>> FormatTimes::ParseDuration [GOOD]
>> TDqBlockHashJoinBasicTest::TestBasicPassthrough [GOOD]
>> TDqBlockHashJoinBasicTest::TestEmptyStreams [GOOD]
>> TDqHashCombineTest::TestBlockModeNoInput
>> TDqHashCombineTest::TestBlockModeNoInput [GOOD]
>> TDqHashCombineTest::TestBlockModeSingleRow [GOOD]
>> TDqHashCombineTest::TestBlockModeMultiBlocks
>> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD]
>> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction
>> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD]
>> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction
>> TBsDbStat::ChaoticParallelWrite_DbStat
>> TDqHashCombineTest::TestBlockModeMultiBlocks [GOOD]
>> TDqHashCombineTest::TestWideModeNoInput [GOOD]
>> TDqHashCombineTest::TestWideModeSingleRow
>> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD]
>> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction
>> TDqHashCombineTest::TestWideModeSingleRow [GOOD]
>> TDqHashCombineTest::TestWideModeMultiRows
>> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD]
>> TBsVDiskExtreme::Simple3Put3GetCompaction
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD]
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction
>> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD]
>> TBsVDiskGC::GCPutBarrierVDisk0NoSync
>> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD]
>> TBsVDiskExtremeHuge::Simple3Put3GetCompaction
>> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD]
|80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp
>> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD]
>> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath
>> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD]
>> TBsVDiskManyPutGet::ManyPutGet
>> TBlobStorageBlocksCacheTest::MultipleTables [GOOD]
>> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD]
>> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD]
>> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction
>> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD]
>> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD]
>> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction
>> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD]
>> TDqHashCombineTest::TestWideModeMultiRows [GOOD]
>> TDqScalarHashJoinBasicTest::TestBasicPassthrough [GOOD]
>> TDqScalarHashJoinBasicTest::TestEmptyFlows [GOOD]
>> TDqScalarHashJoinBasicTest::TestEmptyLeft
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
|80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp
>> TDqScalarHashJoinBasicTest::TestEmptyLeft [GOOD]
>> TDqScalarHashJoinBasicTest::TestEmptyRight [GOOD]
>> TBlobStorageBlocksCacheTest::PutDeepIntoPast
>> TBsVDiskRepl3::SyncLogTest [GOOD]
>> THugeMigration::ExtendMap_HugeBlobs
>> TBlobStorageBlocksCacheTest::Repeat [GOOD]
|80.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD]
|80.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD]
>> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD]
>> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly
|80.2%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::MultipleTables [GOOD]
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD]
>> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh
|80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut
>> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD]
>> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction
>> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD]
>> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD]
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD]
|80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::Repeat [GOOD]
>> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD]
>> TBsVDiskDefrag::DefragEmptyDB
>> TBsVDiskRepl1::ReplProxyKeepBits [GOOD]
>> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk
|80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut
|80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest
>> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD]
|80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut
>> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD]
>> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction
|80.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/comp_nodes/ut/unittest >> TDqScalarHashJoinBasicTest::TestEmptyRight [GOOD]
Test command err:
WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0
Trying to read left stream with width 2
Left stream status: 0
Left stream read successful!
Copying leftInput[0] IsBoxed=0 IsSpecial=0 IsInvalid=0
Successfully copied leftInput[0]
Copying block length from leftInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1
WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0
Trying to read left stream with width 2
Left stream status: 0
Left stream read successful!
Copying leftInput[0] IsBoxed=0 IsSpecial=0 IsInvalid=0 Successfully copied leftInput[0] Copying block length from leftInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0 Trying to read left stream with width 2 Left stream status: 0 Left stream read successful! Copying leftInput[0] IsBoxed=0 IsSpecial=0 IsInvalid=0 Successfully copied leftInput[0] Copying block length from leftInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0 Trying to read left stream with width 2 Left stream status: 0 Left stream read successful! Copying leftInput[0] IsBoxed=0 IsSpecial=0 IsInvalid=0 Successfully copied leftInput[0] Copying block length from leftInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0 Trying to read left stream with width 2 Left stream status: 0 Left stream read successful! Copying leftInput[0] IsBoxed=0 IsSpecial=0 IsInvalid=0 Successfully copied leftInput[0] Copying block length from leftInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0 Trying to read left stream with width 2 Left stream status: 1 Left stream finished! Right stream read successful! Copying rightInput[0] IsBoxed=0 Copying block length from rightInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=1 rightFinished=0 Right stream read successful! Copying rightInput[0] IsBoxed=0 Copying block length from rightInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=1 rightFinished=0 Right stream read successful! Copying rightInput[0] IsBoxed=0 Copying block length from rightInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=1 rightFinished=0 Right stream read successful! Copying rightInput[0] IsBoxed=0 Copying block length from rightInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=1 rightFinished=0 Right stream read successful! Copying rightInput[0] IsBoxed=0 Copying block length from rightInput[1] to output[1] IsBoxed=0 IsEmpty=0 IsEmbedded=1 WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=1 rightFinished=0 Right stream finished! Both streams finished, returning Finish WideFetch called: width=2 leftWidth=2 rightWidth=2 leftFinished=0 rightFinished=0 Trying to read left stream with width 2 Left stream status: 1 Left stream finished! Right stream finished! 
Both streams finished, returning Finish |80.2%| [TM] {RESULT} ydb/library/yql/dq/comp_nodes/ut/unittest |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRepl1::ReplProxyData >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh >> TChainLayoutBuilder::TestProdConf [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] >> TopTest::Test2 [GOOD] |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |80.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> TopTest::Test1 [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] |80.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... 
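The TDqScalarHashJoinBasicTest stderr above records a simple drain order: WideFetch pulls the left stream until it reports finished, then drains the right stream, and returns Finish only once both are exhausted. Below is a minimal, self-contained C++ sketch of that left-then-right pass-through loop; it is illustrative only, not the YDB/DQ implementation, and the Stream type and row values are made up for the example.

    // drain_order.cpp -- mirrors the event order in the trace above:
    // drain the left stream, then the right one, then finish.
    #include <deque>
    #include <iostream>
    #include <optional>
    #include <string>

    // Hypothetical stand-in for a wide stream: yields rows until exhausted.
    struct Stream {
        std::deque<std::string> rows;
        std::optional<std::string> Fetch() {        // "WideFetch" analogue
            if (rows.empty()) return std::nullopt;  // stream finished
            std::string row = std::move(rows.front());
            rows.pop_front();
            return row;                             // read successful
        }
    };

    int main() {
        Stream left{{"L1", "L2", "L3"}};
        Stream right{{"R1", "R2"}};
        bool leftFinished = false, rightFinished = false;

        while (!leftFinished || !rightFinished) {
            if (!leftFinished) {                    // left stream has priority
                if (auto row = left.Fetch()) {
                    std::cout << "copied from left: " << *row << '\n';
                    continue;
                }
                leftFinished = true;
                std::cout << "left stream finished\n";
            }
            if (!rightFinished) {                   // then the right stream
                if (auto row = right.Fetch()) {
                    std::cout << "copied from right: " << *row << '\n';
                    continue;
                }
                rightFinished = true;
                std::cout << "right stream finished\n";
            }
        }
        std::cout << "both streams finished, returning Finish\n";
    }

Compiled with any C++17 compiler, this prints the same left-before-right sequence as the test trace: once the left stream reports finished, only right-stream reads appear until both streams finish.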
results_accumulator.log} >> THugeHeapCtxTests::Basic [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic >> TBlobStorageReplRecoveryMachine::BasicFunctionality |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |80.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction |80.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction |80.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |80.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> 
TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> HugeBlobOnlineSizeChange::Compaction |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskManyPutGet::ManyMultiSinglePutGet |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutBarrierSync |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |80.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |80.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] |80.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh |80.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |80.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk >> BSCRestartPDisk::RestartNotAllowed |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |80.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |80.3%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 7632196560921465390 2025-09-25T16:15:37.704679Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704701Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704708Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704714Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704720Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704726Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704732Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: 
(2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.704983Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705002Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705014Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705026Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705037Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705049Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705062Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705075Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705081Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705087Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705097Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705105Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705110Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705116Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:37.705479Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705487Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705494Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705503Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705510Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705517Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:37.705523Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> BsControllerTest::DecommitRejected |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BsControllerTest::DecommitRejected [GOOD] >> TBtreeIndexTPartLarge::SmallKeys1GB [GOOD] >> TBtreeIndexTPartLarge::MiddleKeys1GB ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 12221174042425748540 2025-09-25T16:15:38.325760Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325805Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325828Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325837Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325845Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325853Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' 
StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325862Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.325869Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326195Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326217Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326230Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326245Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326257Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326271Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326284Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326298Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.326312Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326321Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326330Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326336Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326342Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' 
ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326349Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326355Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.326362Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-09-25T16:15:38.327173Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327188Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327198Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327207Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327216Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327225Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327234Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.327243Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-09-25T16:15:38.354464Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2025-09-25T16:15:38.354486Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2025-09-25T16:15:38.354493Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2025-09-25T16:15:38.354502Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# 
VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2025-09-25T16:15:38.354509Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2025-09-25T16:15:38.354515Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2025-09-25T16:15:38.354521Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} >> SelfHealActorTest::NoMoreThanOneReplicating >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob1Slow >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize1000 >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] >> BsControllerTest::TestLocalBrokenRelocation >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 9270959154103892176 >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> BsControllerTest::TestLocalSelfHeal >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-09-25T16:15:39.285945Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-09-25T16:15:39.285967Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-09-25T16:15:39.286011Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-09-25T16:15:39.286015Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-09-25T16:15:39.286021Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-09-25T16:15:39.286026Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-09-25T16:15:39.286064Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-09-25T16:15:39.286069Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-09-25T16:15:39.286075Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-09-25T16:15:39.286078Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-09-25T16:15:39.286084Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-09-25T16:15:39.286088Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 
2025-09-25T16:15:39.286094Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-09-25T16:15:39.286099Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-09-25T16:15:39.286104Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-09-25T16:15:39.286108Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-09-25T16:15:39.286114Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-09-25T16:15:39.286119Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-09-25T16:15:39.286125Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-09-25T16:15:39.286129Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-09-25T16:15:39.286135Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-09-25T16:15:39.286139Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-09-25T16:15:39.286145Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-09-25T16:15:39.286149Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-09-25T16:15:39.286154Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-09-25T16:15:39.286158Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-09-25T16:15:39.286169Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-09-25T16:15:39.286173Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-09-25T16:15:39.286179Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-09-25T16:15:39.286182Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-09-25T16:15:39.288879Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2025-09-25T16:15:39.289007Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2025-09-25T16:15:39.289015Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2025-09-25T16:15:39.289022Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2025-09-25T16:15:39.289029Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 2025-09-25T16:15:39.289037Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2025-09-25T16:15:39.289044Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2025-09-25T16:15:39.289050Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2025-09-25T16:15:39.289057Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2025-09-25T16:15:39.289065Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2025-09-25T16:15:39.289072Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2025-09-25T16:15:39.289081Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 
2025-09-25T16:15:39.289088Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2025-09-25T16:15:39.289095Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# [14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2025-09-25T16:15:39.289102Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2025-09-25T16:15:39.301586Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-09-25T16:15:39.301609Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-09-25T16:15:39.301618Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-09-25T16:15:39.301625Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-09-25T16:15:39.301634Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2025-09-25T16:15:39.301641Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-09-25T16:15:39.301649Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-09-25T16:15:39.301657Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-09-25T16:15:39.301664Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-09-25T16:15:39.301672Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-09-25T16:15:39.301679Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-09-25T16:15:39.301686Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-09-25T16:15:39.301693Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-09-25T16:15:39.301700Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-09-25T16:15:39.301707Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-09-25T16:15:39.302249Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2025-09-25T16:15:39.302256Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-09-25T16:15:39.303126Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2025-09-25T16:15:39.303133Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-09-25T16:15:39.303139Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:591:21] Status# OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2025-09-25T16:15:39.303143Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-09-25T16:15:39.303149Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2025-09-25T16:15:39.303153Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-09-25T16:15:39.303158Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2025-09-25T16:15:39.303162Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-09-25T16:15:39.303167Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2025-09-25T16:15:39.303171Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-09-25T16:15:39.303177Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2025-09-25T16:15:39.303180Z 7 00h00m00.100000s :BS_NODE 
DEBUG: [7] State switched from 0 to 1 2025-09-25T16:15:39.303186Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2025-09-25T16:15:39.303191Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2025-09-25T16:15:39.303196Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2025-09-25T16:15:39.303200Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-09-25T16:15:39.303205Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2025-09-25T16:15:39.303209Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-09-25T16:15:39.303215Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2025-09-25T16:15:39.303218Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-09-25T16:15:39.303223Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] PipeClient# [12:600:21] 2025-09-25T16:15:39.303227Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-09-25T16:15:39.303232Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2025-09-25T16:15:39.303236Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-09-25T16:15:39.303241Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2025-09-25T16:15:39.303245Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-09-25T16:15:39.303250Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2025-09-25T16:15:39.303254Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-09-25T16:15:39.303601Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:39.303612Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-09-25T16:15:39.306951Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-09-25T16:15:39.307267Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-09-25T16:15:39.307277Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-09-25T16:15:39.307289Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2025-09-25T16:15:39.307303Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-09-25T16:15:39.307309Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-09-25T16:15:39.307315Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-09-25T16:15:39.307327Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-09-25T16:15:39.307332Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-09-25T16:15:39.307339Z 4 
00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-09-25T16:15:39.307351Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-09-25T16:15:39.307356Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-09-25T16:15:39.307363Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-09-25T1 ... :0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384727Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384743Z 10 00h01m20.461512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-09-25T16:15:39.384780Z 1 00h01m20.461512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384839Z 1 00h01m23.246512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384878Z 1 00h01m23.691512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384898Z 15 00h01m24.721512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2025-09-25T16:15:39.384934Z 1 00h01m24.721512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.384993Z 1 00h01m27.074512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385014Z 14 00h01m28.278536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 
2025-09-25T16:15:39.385059Z 1 00h01m28.278536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385396Z 8 00h01m28.279048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-09-25T16:15:39.385408Z 8 00h01m28.279048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-09-25T16:15:39.385475Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385547Z 1 00h01m30.005512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385576Z 3 00h01m30.084512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2025-09-25T16:15:39.385631Z 1 00h01m30.084512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385673Z 1 00h01m33.070512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:709} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-09-25T16:15:39.385693Z 13 00h01m34.534024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2025-09-25T16:15:39.385775Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2025-09-25T16:15:39.385901Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385909Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-09-25T16:15:39.385949Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385953Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2025-09-25T16:15:39.385959Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: 
{BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385963Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-09-25T16:15:39.385969Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385973Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-09-25T16:15:39.385981Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385985Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2025-09-25T16:15:39.385990Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.385995Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-09-25T16:15:39.386000Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.386005Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-09-25T16:15:39.386010Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:39.386014Z 1 00h01m34.534024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-09-25T16:15:39.386397Z 1 00h01m34.534536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:39.386407Z 1 00h01m34.534536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-09-25T16:15:39.386509Z 1 00h01m34.534536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:207} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-09-25T16:15:39.386515Z 1 00h01m34.534536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:218} Reassigner finished GroupId# 2147483648 Success# true 2025-09-25T16:15:39.386533Z 7 00h01m34.534536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-09-25T16:15:39.386539Z 7 00h01m34.534536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-09-25T16:15:39.386552Z 2 00h01m34.534536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-09-25T16:15:39.386557Z 2 00h01m34.534536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-09-25T16:15:39.386569Z 3 00h01m34.534536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-09-25T16:15:39.386574Z 3 00h01m34.534536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-09-25T16:15:39.386584Z 4 00h01m34.534536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-09-25T16:15:39.386590Z 4 00h01m34.534536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 
2025-09-25T16:15:39.386601Z 5 00h01m34.534536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-09-25T16:15:39.386606Z 5 00h01m34.534536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2025-09-25T16:15:39.386616Z 6 00h01m34.534536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-09-25T16:15:39.386622Z 6 00h01m34.534536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-09-25T16:15:39.386630Z 9 00h01m34.534536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-09-25T16:15:39.386640Z 13 00h01m34.534536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-09-25T16:15:39.386645Z 13 00h01m34.534536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-09-25T16:15:39.386656Z 14 00h01m34.534536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:39.386661Z 14 00h01m34.534536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-09-25T16:15:39.386672Z 15 00h01m34.534536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-09-25T16:15:39.386677Z 15 00h01m34.534536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-09-25T16:15:39.386687Z 15 00h01m34.534536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-09-25T16:15:39.386882Z 12 00h01m35.143512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2025-09-25T16:15:39.386990Z 15 00h01m39.792536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-09-25T16:15:39.387215Z 15 00h01m58.009536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-09-25T16:15:39.387340Z 9 00h01m58.010048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-09-25T16:15:39.387346Z 9 00h01m58.010048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts >> CheckIntegrityMirror3dc::PlacementOkWithErrors >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 13329410641728814449 >> CompatibilityInfo::VDiskCompatible |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> CheckIntegrityBlock42::PlacementBlobIsLost >> Acceleration::TestThresholdPutMirror3dc2Slow >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> CompatibilityInfo::VDiskCompatible [GOOD] >> CompatibilityInfo::VDiskIncompatible [GOOD] >> CompatibilityInfo::VDiskIncompatibleWithDefault [GOOD] >> CompatibilityInfo::VDiskSuppressCompatibilityCheck [GOOD] >> CompatibilityInfo::VDiskMigration >> CheckIntegrityMirror3dc::PlacementOkWithErrors [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks >> CheckIntegrityMirror3dc::PlacementOk >> CompatibilityInfo::VDiskMigration [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 18158494922604152533 >> CheckIntegrityBlock42::PlacementBlobIsLost [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff >> 
CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost >> TCowBTreeTest::Concurrent [GOOD] >> BsControllerTest::TestLocalSelfHeal [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff [GOOD] >> CheckIntegrityBlock42::PlacementDisintegrated >> TCowBTreeTest::Alignment [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> CheckIntegrityMirror3dc::PlacementOk [GOOD] >> CheckIntegrityMirror3dc::PlacementOkHandoff >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] >> CheckIntegrityBlock42::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown >> CheckIntegrityMirror3dc::PlacementOkHandoff [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob1Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob1Slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 15576020613863720999 2025-09-25T16:15:38.229801Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-09-25T16:15:38.230209Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11974035864134190814] 2025-09-25T16:15:38.231325Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] Test command err: RandomSeed# 18291176963157198517 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 10000369050135878509 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { 
part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 
2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 
2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::Decommit >> Acceleration::TestThresholdPutMirror3dc2Slow [GOOD] >> BlobPatching::Mirror3of4 |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> 
BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-09-25T16:15:40.526259Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-09-25T16:15:40.526280Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-09-25T16:15:40.526300Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-09-25T16:15:40.526304Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-09-25T16:15:40.526309Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-09-25T16:15:40.526313Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-09-25T16:15:40.526324Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-09-25T16:15:40.526328Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-09-25T16:15:40.526338Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-09-25T16:15:40.526342Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-09-25T16:15:40.526348Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-09-25T16:15:40.526352Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-09-25T16:15:40.526357Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-09-25T16:15:40.526360Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-09-25T16:15:40.526365Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-09-25T16:15:40.526369Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-09-25T16:15:40.526374Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-09-25T16:15:40.526378Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-09-25T16:15:40.526384Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-09-25T16:15:40.526388Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-09-25T16:15:40.526393Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-09-25T16:15:40.526397Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-09-25T16:15:40.526402Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-09-25T16:15:40.526406Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-09-25T16:15:40.526411Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-09-25T16:15:40.526415Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-09-25T16:15:40.526420Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-09-25T16:15:40.526425Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-09-25T16:15:40.526430Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-09-25T16:15:40.526434Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-09-25T16:15:40.526440Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-09-25T16:15:40.526444Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-09-25T16:15:40.526449Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-09-25T16:15:40.526453Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-09-25T16:15:40.526458Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-09-25T16:15:40.526462Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-09-25T16:15:40.526468Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-09-25T16:15:40.526471Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-09-25T16:15:40.526495Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-09-25T16:15:40.526499Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-09-25T16:15:40.526504Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-09-25T16:15:40.526507Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-09-25T16:15:40.526513Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-09-25T16:15:40.526517Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-09-25T16:15:40.526523Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 
2025-09-25T16:15:40.526526Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-09-25T16:15:40.526532Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-09-25T16:15:40.526535Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-09-25T16:15:40.526540Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-09-25T16:15:40.526547Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-09-25T16:15:40.526552Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-09-25T16:15:40.526556Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-09-25T16:15:40.526562Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-09-25T16:15:40.526567Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-09-25T16:15:40.526573Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-09-25T16:15:40.526577Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-09-25T16:15:40.526582Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-09-25T16:15:40.526586Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-09-25T16:15:40.526592Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-09-25T16:15:40.526596Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-09-25T16:15:40.526601Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-09-25T16:15:40.526605Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-09-25T16:15:40.526610Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-09-25T16:15:40.526614Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-09-25T16:15:40.526619Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-09-25T16:15:40.526623Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-09-25T16:15:40.526628Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-09-25T16:15:40.526632Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-09-25T16:15:40.526637Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-09-25T16:15:40.526641Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-09-25T16:15:40.526655Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-09-25T16:15:40.526659Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-09-25T16:15:40.530845Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-09-25T16:15:40.531096Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-09-25T16:15:40.531104Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-09-25T16:15:40.531111Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-09-25T16:15:40.531117Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-09-25T16:15:40.531123Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-09-25T16:15:40.531130Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-09-25T16:15:40.531136Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 
2025-09-25T16:15:40.531142Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-09-25T16:15:40.531148Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-09-25T16:15:40.531154Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-09-25T16:15:40.531160Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-09-25T16:15:40.531166Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-09-25T16:15:40.531172Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-09-25T16:15:40.531180Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-09-25T16:15:40.531187Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-09-25T16:15:40.531193Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-09-25T16:15:40.531199Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-09-25T16:15:40.531206Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-09-25T16:15:40.531212Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-09-25T16:15:40.531218Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-09-25T16:15:40.531224Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-09-25T16:15:40.531231Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-09-25T16:15:40.531237Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-09-25T16:15:40.531243Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-09-25T16:15:40.531249Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-09-25T16:15:40.531255Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 
2025-09-25T16:15:40.531261Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-09-25T16:15:40.531268Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-09-25T16:15:40.531275Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-09-25T16:15:40.531281Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-09-25T16:15:40.531287Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-09-25T16:15:40.531293Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-09-25T16:15:40.531299Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-09-25T16:15:40.531305Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-09-25T16:15:41.548731Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-09-25T16:15:41.548736Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-09-25T16:15:41.548742Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-09-25T16:15:41.548748Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-09-25T16:15:41.555868Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-09-25T16:15:41.555904Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-09-25T16:15:41.555914Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-09-25T16:15:41.555921Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-09-25T16:15:41.555929Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-09-25T16:15:41.555936Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-09-25T16:15:41.555943Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-09-25T16:15:41.555949Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-09-25T16:15:41.555956Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-09-25T16:15:41.555963Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-09-25T16:15:41.555969Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-09-25T16:15:41.555975Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-09-25T16:15:41.555982Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-09-25T16:15:41.555989Z 
13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-09-25T16:15:41.555999Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-09-25T16:15:41.556006Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-09-25T16:15:41.556012Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-09-25T16:15:41.556179Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-09-25T16:15:41.556191Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-09-25T16:15:41.556198Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-09-25T16:15:41.556205Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-09-25T16:15:41.556212Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-09-25T16:15:41.556219Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-09-25T16:15:41.556226Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-09-25T16:15:41.556233Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-09-25T16:15:41.556240Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-09-25T16:15:41.556246Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-09-25T16:15:41.556253Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-09-25T16:15:41.556260Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-09-25T16:15:41.556333Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-09-25T16:15:41.556345Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-09-25T16:15:41.556352Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-09-25T16:15:41.556359Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-09-25T16:15:41.556366Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-09-25T16:15:41.556373Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-09-25T16:15:41.556380Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-09-25T16:15:41.556387Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-09-25T16:15:41.556394Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-09-25T16:15:41.557126Z 7 01h25m01.240560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-09-25T16:15:41.557213Z 7 01h25m01.451560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-09-25T16:15:41.557278Z 2 01h25m01.975560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557358Z 10 01h25m02.079560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-09-25T16:15:41.557427Z 10 01h25m02.429560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-09-25T16:15:41.557488Z 5 
01h25m02.434560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557553Z 5 01h25m02.999560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557619Z 4 01h25m03.070560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557689Z 4 01h25m03.438560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557748Z 4 01h25m03.473560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557812Z 2 01h25m04.175560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557889Z 4 01h25m04.705560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-09-25T16:15:41.557956Z 7 01h25m04.921560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-09-25T16:15:41.568935Z 8 01h25m05.120560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-09-25T16:15:41.569263Z 10 01h25m05.209560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-09-25T16:15:41.569385Z 7 01h25m05.545560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-09-25T16:15:41.570190Z 4 01h25m11.358560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-09-25T16:15:41.572638Z 1 01h25m11.359072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.572658Z 1 01h25m11.359072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-09-25T16:15:41.572706Z 7 01h25m13.075560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-09-25T16:15:41.574842Z 1 01h25m13.076072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.574862Z 1 01h25m13.076072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-09-25T16:15:41.575647Z 7 01h25m15.405560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-09-25T16:15:41.582180Z 1 01h25m15.406072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.582203Z 1 01h25m15.406072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-09-25T16:15:41.582291Z 2 01h25m15.523560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-09-25T16:15:41.584405Z 1 01h25m15.524072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.584425Z 1 01h25m15.524072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-09-25T16:15:41.584468Z 4 01h25m18.417560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-09-25T16:15:41.586584Z 1 01h25m18.418072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.586603Z 1 01h25m18.418072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-09-25T16:15:41.586635Z 4 01h25m18.989560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-09-25T16:15:41.588681Z 1 01h25m18.990072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.588698Z 1 01h25m18.990072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-09-25T16:15:41.588736Z 7 01h25m19.332560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-09-25T16:15:41.599192Z 1 01h25m19.333072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.599219Z 1 01h25m19.333072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 
2025-09-25T16:15:41.599266Z 7 01h25m19.457560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-09-25T16:15:41.609817Z 1 01h25m19.458072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.609859Z 1 01h25m19.458072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-09-25T16:15:41.610646Z 10 01h25m21.988560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-09-25T16:15:41.612776Z 1 01h25m21.989072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.612797Z 1 01h25m21.989072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-09-25T16:15:41.617548Z 5 01h25m25.802560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-09-25T16:15:41.619561Z 1 01h25m25.803072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.619579Z 1 01h25m25.803072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2025-09-25T16:15:41.619620Z 2 01h25m26.263560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-09-25T16:15:41.629723Z 1 01h25m26.264072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.629746Z 1 01h25m26.264072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-09-25T16:15:41.629879Z 10 01h25m27.432560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2025-09-25T16:15:41.631739Z 1 01h25m27.433072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.631752Z 1 01h25m27.433072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-09-25T16:15:41.632582Z 10 01h25m30.746560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-09-25T16:15:41.634390Z 1 01h25m30.747072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.634406Z 1 01h25m30.747072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-09-25T16:15:41.634486Z 5 01h25m33.596560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-09-25T16:15:41.636213Z 1 01h25m33.597072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.636226Z 1 01h25m33.597072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-09-25T16:15:41.641241Z 8 01h25m36.174560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-09-25T16:15:41.650150Z 1 01h25m36.175072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.650169Z 1 01h25m36.175072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-09-25T16:15:41.650206Z 4 01h25m36.753560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-09-25T16:15:41.651984Z 1 01h25m36.754072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-09-25T16:15:41.651997Z 1 01h25m36.754072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed >> BSCMovePDisk::PDiskMove_ErasureNone |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] >> BSCMovePDisk::PDiskMove_Block42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] Test command err: RandomSeed# 15180035459359225848 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> BlobPatching::Mirror3of4 [GOOD] >> BlobPatching::Mirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.2740446574 seconds Producer 1 worked for 0.2959856747 seconds Consumer 0 worked for 1.862009674 seconds on a snapshot of size 80000 Consumer 1 worked for 2.200430217 seconds on a snapshot of size 160000 Consumer 2 worked for 3.480358484 seconds on a snapshot of size 240000 Consumer 3 worked for 3.101464171 seconds on a snapshot of size 320000 Consumers had 11999907 successful seeks >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize1000 |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] Test command err: RandomSeed# 10853049113265973551 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: part 2: part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-09-25T16:15:41.083413Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-09-25T16:15:41.083437Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-09-25T16:15:41.083456Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-09-25T16:15:41.083460Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-09-25T16:15:41.083466Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-09-25T16:15:41.083470Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-09-25T16:15:41.083481Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-09-25T16:15:41.083484Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-09-25T16:15:41.083496Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-09-25T16:15:41.083500Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-09-25T16:15:41.083505Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-09-25T16:15:41.083509Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-09-25T16:15:41.083514Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-09-25T16:15:41.083517Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-09-25T16:15:41.083523Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-09-25T16:15:41.083526Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-09-25T16:15:41.083531Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-09-25T16:15:41.083535Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-09-25T16:15:41.083540Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-09-25T16:15:41.083544Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-09-25T16:15:41.083550Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-09-25T16:15:41.083554Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-09-25T16:15:41.083559Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-09-25T16:15:41.083563Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-09-25T16:15:41.083568Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-09-25T16:15:41.083571Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-09-25T16:15:41.083577Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-09-25T16:15:41.083582Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-09-25T16:15:41.083587Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-09-25T16:15:41.083591Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-09-25T16:15:41.083596Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-09-25T16:15:41.083601Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-09-25T16:15:41.083606Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-09-25T16:15:41.083609Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-09-25T16:15:41.083614Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-09-25T16:15:41.083618Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-09-25T16:15:41.083624Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-09-25T16:15:41.083627Z 19 00h00m00.000000s :BS_NODE 
DEBUG: [19] Connect 2025-09-25T16:15:41.083636Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-09-25T16:15:41.083640Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-09-25T16:15:41.083644Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-09-25T16:15:41.083648Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-09-25T16:15:41.083653Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-09-25T16:15:41.083657Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-09-25T16:15:41.083662Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-09-25T16:15:41.083665Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-09-25T16:15:41.083671Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-09-25T16:15:41.083675Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-09-25T16:15:41.083680Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-09-25T16:15:41.083686Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-09-25T16:15:41.083691Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-09-25T16:15:41.083695Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-09-25T16:15:41.083700Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-09-25T16:15:41.083705Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-09-25T16:15:41.083710Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-09-25T16:15:41.083714Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-09-25T16:15:41.083720Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-09-25T16:15:41.083723Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-09-25T16:15:41.083729Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-09-25T16:15:41.083732Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-09-25T16:15:41.083737Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-09-25T16:15:41.083741Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-09-25T16:15:41.083746Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-09-25T16:15:41.083750Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-09-25T16:15:41.083755Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-09-25T16:15:41.083759Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-09-25T16:15:41.083764Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-09-25T16:15:41.083768Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-09-25T16:15:41.083772Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-09-25T16:15:41.083776Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-09-25T16:15:41.083791Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-09-25T16:15:41.083794Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-09-25T16:15:41.088459Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-09-25T16:15:41.088771Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-09-25T16:15:41.088780Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-09-25T16:15:41.088788Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-09-25T16:15:41.088795Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] 
PipeClient# [5:2723:41] 2025-09-25T16:15:41.088802Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-09-25T16:15:41.088809Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-09-25T16:15:41.088817Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-09-25T16:15:41.088845Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-09-25T16:15:41.088853Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-09-25T16:15:41.088859Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-09-25T16:15:41.088865Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-09-25T16:15:41.088872Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-09-25T16:15:41.088878Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-09-25T16:15:41.088887Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-09-25T16:15:41.088894Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-09-25T16:15:41.088901Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-09-25T16:15:41.088907Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-09-25T16:15:41.088914Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-09-25T16:15:41.088920Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-09-25T16:15:41.088926Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-09-25T16:15:41.088932Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-09-25T16:15:41.088939Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-09-25T16:15:41.088944Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 
2025-09-25T16:15:41.088951Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-09-25T16:15:41.088957Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-09-25T16:15:41.088963Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-09-25T16:15:41.088970Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-09-25T16:15:41.088976Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-09-25T16:15:41.088983Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-09-25T16:15:41.088989Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-09-25T16:15:41.088995Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-09-25T16:15:41.089001Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-09-25T16:15:41.089008Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-09-25T16:15:41.089014Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
edGroup# true Replicated# true 2025-09-25T16:15:41.600009Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:1:2:1:0] DiskIsOk# true 2025-09-25T16:15:41.600015Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483688 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.600021Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:1:2:2:0] DiskIsOk# true 2025-09-25T16:15:41.603672Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:207} Reassigner succeeded GroupId# 2147483688 Items# [80000028:1:1:0:0]: 14:1002:1002 -> 14:1000:1010 ConfigTxSeqNo# 48 2025-09-25T16:15:41.603691Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:218} Reassigner finished GroupId# 2147483688 Success# true 2025-09-25T16:15:41.603725Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-09-25T16:15:41.603737Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] VDiskId# [80000028:1:1:1:0] -> [80000028:2:1:1:0] 2025-09-25T16:15:41.603754Z 2 00h05m00.105120s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-09-25T16:15:41.603761Z 2 00h05m00.105120s :BS_NODE DEBUG: [2] VDiskId# [80000028:1:0:0:0] -> [80000028:2:0:0:0] 2025-09-25T16:15:41.603776Z 20 00h05m00.105120s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-09-25T16:15:41.603783Z 20 00h05m00.105120s :BS_NODE DEBUG: [20] VDiskId# [80000028:1:1:2:0] -> [80000028:2:1:2:0] 2025-09-25T16:15:41.603796Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-09-25T16:15:41.603804Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] VDiskId# [80000028:1:0:1:0] -> [80000028:2:0:1:0] 2025-09-25T16:15:41.603818Z 8 00h05m00.105120s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-09-25T16:15:41.603826Z 8 00h05m00.105120s :BS_NODE DEBUG: [8] VDiskId# [80000028:1:0:2:0] -> [80000028:2:0:2:0] 2025-09-25T16:15:41.603841Z 26 00h05m00.105120s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-09-25T16:15:41.603849Z 26 00h05m00.105120s :BS_NODE DEBUG: [26] VDiskId# [80000028:1:2:0:0] -> [80000028:2:2:0:0] 2025-09-25T16:15:41.603864Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-09-25T16:15:41.603871Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] VDiskId# [80000028:1:2:1:0] -> [80000028:2:2:1:0] 2025-09-25T16:15:41.603887Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.603895Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] VDiskId# [80000028:2:1:0:0] PDiskId# 1000 VSlotId# 1010 created 2025-09-25T16:15:41.603906Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] VDiskId# [80000028:2:1:0:0] status changed to INIT_PENDING 2025-09-25T16:15:41.603923Z 32 00h05m00.105120s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-09-25T16:15:41.603930Z 32 00h05m00.105120s :BS_NODE DEBUG: [32] VDiskId# [80000028:1:2:2:0] -> [80000028:2:2:2:0] 2025-09-25T16:15:41.603998Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483672 2025-09-25T16:15:41.604180Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604191Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:0:0] DiskIsOk# true 2025-09-25T16:15:41.604197Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: 
{BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604201Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:1:0] DiskIsOk# true 2025-09-25T16:15:41.604207Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604212Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:2:0] DiskIsOk# true 2025-09-25T16:15:41.604217Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604222Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:1:1:0] DiskIsOk# true 2025-09-25T16:15:41.604228Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604232Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:1:2:0] DiskIsOk# true 2025-09-25T16:15:41.604238Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604242Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:0:0] DiskIsOk# true 2025-09-25T16:15:41.604248Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604252Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:1:0] DiskIsOk# true 2025-09-25T16:15:41.604258Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-09-25T16:15:41.604262Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:2:0] DiskIsOk# true 2025-09-25T16:15:41.614624Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:207} Reassigner succeeded GroupId# 2147483672 Items# [80000018:1:1:0:0]: 14:1002:1001 -> 14:1001:1010 ConfigTxSeqNo# 49 2025-09-25T16:15:41.614643Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:218} Reassigner finished GroupId# 2147483672 Success# true 2025-09-25T16:15:41.614688Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-09-25T16:15:41.614702Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] VDiskId# [80000018:1:1:1:0] -> [80000018:2:1:1:0] 2025-09-25T16:15:41.614720Z 2 00h05m00.105632s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-09-25T16:15:41.614728Z 2 00h05m00.105632s :BS_NODE DEBUG: [2] VDiskId# [80000018:1:0:0:0] -> [80000018:2:0:0:0] 2025-09-25T16:15:41.614745Z 20 00h05m00.105632s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-09-25T16:15:41.614755Z 20 00h05m00.105632s :BS_NODE DEBUG: [20] VDiskId# [80000018:1:1:2:0] -> [80000018:2:1:2:0] 2025-09-25T16:15:41.614769Z 5 
00h05m00.105632s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-09-25T16:15:41.614778Z 5 00h05m00.105632s :BS_NODE DEBUG: [5] VDiskId# [80000018:1:0:1:0] -> [80000018:2:0:1:0] 2025-09-25T16:15:41.614797Z 8 00h05m00.105632s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-09-25T16:15:41.614806Z 8 00h05m00.105632s :BS_NODE DEBUG: [8] VDiskId# [80000018:1:0:2:0] -> [80000018:2:0:2:0] 2025-09-25T16:15:41.614823Z 26 00h05m00.105632s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-09-25T16:15:41.614831Z 26 00h05m00.105632s :BS_NODE DEBUG: [26] VDiskId# [80000018:1:2:0:0] -> [80000018:2:2:0:0] 2025-09-25T16:15:41.614846Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-09-25T16:15:41.614854Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] VDiskId# [80000018:1:2:1:0] -> [80000018:2:2:1:0] 2025-09-25T16:15:41.614871Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.614879Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] VDiskId# [80000018:2:1:0:0] PDiskId# 1001 VSlotId# 1010 created 2025-09-25T16:15:41.614894Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] VDiskId# [80000018:2:1:0:0] status changed to INIT_PENDING 2025-09-25T16:15:41.614913Z 32 00h05m00.105632s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-09-25T16:15:41.614921Z 32 00h05m00.105632s :BS_NODE DEBUG: [32] VDiskId# [80000018:1:2:2:0] -> [80000018:2:2:2:0] 2025-09-25T16:15:41.615198Z 14 00h05m02.522048s :BS_NODE DEBUG: [14] VDiskId# [80000008:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.615338Z 14 00h05m02.892096s :BS_NODE DEBUG: [14] VDiskId# [80000048:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.622663Z 14 00h05m03.824120s :BS_NODE DEBUG: [14] VDiskId# [80000028:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.622862Z 14 00h05m04.582608s :BS_NODE DEBUG: [14] VDiskId# [80000038:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.622978Z 14 00h05m04.727560s :BS_NODE DEBUG: [14] VDiskId# [80000078:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.623090Z 14 00h05m04.916632s :BS_NODE DEBUG: [14] VDiskId# [80000018:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.623199Z 14 00h05m04.946584s :BS_NODE DEBUG: [14] VDiskId# [80000058:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.624065Z 14 00h05m05.346072s :BS_NODE DEBUG: [14] VDiskId# [80000068:2:1:0:0] status changed to REPLICATING 2025-09-25T16:15:41.624534Z 14 00h05m12.338584s :BS_NODE DEBUG: [14] VDiskId# [80000058:2:1:0:0] status changed to READY 2025-09-25T16:15:41.626479Z 14 00h05m12.339096s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.626496Z 14 00h05m12.339096s :BS_NODE DEBUG: [14] VDiskId# [80000058:1:1:0:0] destroyed 2025-09-25T16:15:41.626534Z 14 00h05m12.597072s :BS_NODE DEBUG: [14] VDiskId# [80000068:2:1:0:0] status changed to READY 2025-09-25T16:15:41.628246Z 14 00h05m12.597584s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.628256Z 14 00h05m12.597584s :BS_NODE DEBUG: [14] VDiskId# [80000068:1:1:0:0] destroyed 2025-09-25T16:15:41.637074Z 14 00h05m18.295096s :BS_NODE DEBUG: [14] VDiskId# [80000048:2:1:0:0] status changed to READY 2025-09-25T16:15:41.639262Z 14 00h05m18.295608s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.639283Z 14 00h05m18.295608s :BS_NODE DEBUG: [14] VDiskId# [80000048:1:1:0:0] destroyed 2025-09-25T16:15:41.639896Z 14 00h05m22.034608s :BS_NODE DEBUG: [14] VDiskId# [80000038:2:1:0:0] status changed to READY 2025-09-25T16:15:41.643753Z 14 00h05m22.035120s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 
2025-09-25T16:15:41.643773Z 14 00h05m22.035120s :BS_NODE DEBUG: [14] VDiskId# [80000038:1:1:0:0] destroyed 2025-09-25T16:15:41.643816Z 14 00h05m22.916560s :BS_NODE DEBUG: [14] VDiskId# [80000078:2:1:0:0] status changed to READY 2025-09-25T16:15:41.645677Z 14 00h05m22.917072s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.645692Z 14 00h05m22.917072s :BS_NODE DEBUG: [14] VDiskId# [80000078:1:1:0:0] destroyed 2025-09-25T16:15:41.646832Z 14 00h05m31.885632s :BS_NODE DEBUG: [14] VDiskId# [80000018:2:1:0:0] status changed to READY 2025-09-25T16:15:41.648667Z 14 00h05m31.886144s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.648682Z 14 00h05m31.886144s :BS_NODE DEBUG: [14] VDiskId# [80000018:1:1:0:0] destroyed 2025-09-25T16:15:41.648716Z 14 00h05m32.139048s :BS_NODE DEBUG: [14] VDiskId# [80000008:2:1:0:0] status changed to READY 2025-09-25T16:15:41.657623Z 14 00h05m32.139560s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.657648Z 14 00h05m32.139560s :BS_NODE DEBUG: [14] VDiskId# [80000008:1:1:0:0] destroyed 2025-09-25T16:15:41.657695Z 14 00h05m32.257120s :BS_NODE DEBUG: [14] VDiskId# [80000028:2:1:0:0] status changed to READY 2025-09-25T16:15:41.663130Z 14 00h05m32.257632s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-09-25T16:15:41.663156Z 14 00h05m32.257632s :BS_NODE DEBUG: [14] VDiskId# [80000028:1:1:0:0] destroyed |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart >> BlobPatching::Mirror3dc [GOOD] >> BlobPatching::Mirror3 >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 18404686887884533445 >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes >> BSCMovePDisk::PDiskMove_Mirror3dc >> BlobPatching::Mirror3 [GOOD] >> BlobPatching::Block42 |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 >> BlobPatching::Block42 [GOOD] >> BlobPatching::None |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut >> BlobPatching::None [GOOD] >> BlobPatching::StressMirror3of4 |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |80.4%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CountingEvents::Put_Mirror3of4 |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |80.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |80.4%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] >> CountingEvents::Put_Mirror3of4 [GOOD] >> CountingEvents::Put_Mirror3dc |80.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> CountingEvents::Put_Mirror3dc [GOOD] >> CountingEvents::Put_Block42 >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction >> CountingEvents::Put_Block42 [GOOD] >> test.py::test[schema-user_schema_append--Results] >> CountingEvents::Put_None ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 8872067518643412185 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 4888082275291545594 >> CountingEvents::Put_None [GOOD] >> CountingEvents::Get_Mirror3of4 [GOOD] >> CountingEvents::Get_Mirror3dc >> BSCMovePDisk::PDiskMove_Block42 [GOOD] |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/pqtablet/cache/ut/ydb-core-persqueue-pqtablet-cache-ut |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> BlobPatching::StressMirror3of4 [GOOD] >> BlobPatching::StressMirror3dc |80.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/cache/ut/ydb-core-persqueue-pqtablet-cache-ut >> CountingEvents::Get_Mirror3dc [GOOD] >> CountingEvents::Get_Block42 >> test.py::test[join-yql-8131-off-Results] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob1Slow [GOOD] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob2Slow >> CountingEvents::Get_Block42 [GOOD] >> CountingEvents::Get_None |80.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/ut/ydb-core-persqueue-pqtablet-cache-ut >> CountingEvents::Get_None [GOOD] >> CountingEvents::Collect_Mirror3of4 >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[key_filter-empty_range_over_dynamic--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 3056233419664664518 |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] >> TBsVDiskRepl1::ReadOnly >> CountingEvents::Collect_Mirror3of4 [GOOD] >> CountingEvents::Collect_Mirror3dc >> CountingEvents::Collect_Mirror3dc [GOOD] >> CountingEvents::Collect_Block42 >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> Acceleration::TestAccelerationMirror3dcPutAsyncBlob2Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob2Slow >> BlobPatching::StressMirror3dc [GOOD] >> BlobPatching::StressMirror3 |80.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut 
|80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> test.py::test[action-eval_column--Results] |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> CountingEvents::Collect_Block42 [GOOD] >> CountingEvents::Collect_None |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |80.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge >> CountingEvents::Collect_None [GOOD] >> Deadlines::TestGetMirror3dc |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] |80.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> TBsVDiskRepl1::ReadOnly [GOOD] >> BlobPatching::StressMirror3 [GOOD] >> BlobPatching::StressBlock42 |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> Deadlines::TestGetMirror3dc [GOOD] >> Deadlines::TestGet4Plus2Block |80.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut >> Deadlines::TestGet4Plus2Block [GOOD] >> Deadlines::TestGetMirror3of4 |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReadOnly [GOOD] Test command err: 2025-09-25T16:15:46.717177Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-09-25T16:15:47.020919Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 483759751331931271] 2025-09-25T16:15:48.033162Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TYardTest::TestEnormousDisk [GOOD] |80.5%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a >> test.py::test[select-unlabeled--Results] >> test.py::test[optimizers-fuse_map_mapreduce--ForceBlocks] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> Deadlines::TestGetMirror3of4 [GOOD] >> Deadlines::TestBlockMirror3dc >> test.py::test[select-exists_true-default.txt-ForceBlocks] >> test.py::test[optimizers-fuse_map_mapreduce--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-fuse_map_mapreduce--Results] >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync >> DSProxyStrategyTest::Restore_block42 [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce--Results] [SKIPPED] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] >> test.py::test[column_group-hint_non_lst_yson_fail--ForceBlocks] >> Deadlines::TestBlockMirror3dc [GOOD] >> Deadlines::TestBlock4Plus2Block >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] >> test.py::test[column_group-hint_non_lst_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] >> test.py::test[join-full_trivial--ForceBlocks] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] >> test.py::test[column_group-hint_non_lst_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] >> Deadlines::TestBlock4Plus2Block [GOOD] >> Deadlines::TestBlockMirror3of4 >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] >> test.py::test[pg-tpcds-q63-default.txt-Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] Test command err: 2025-09-25T16:14:45.667617Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.696999Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7729245642283322882 MagicNextLogChunkReference: 4364342833467591318 MagicLogChunk: 765465174474149475 MagicDataChunk: 5861378689555211335 MagicSysLogChunk: 4926569890761049576 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885492693 (2025-09-25T16:14:45.492693Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.701768Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:45.716930Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:45.717194Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:45.721107Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:45.762238Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1434322 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:45.996628Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.024950Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 5809543108641872451 MagicNextLogChunkReference: 9367880951245589824 MagicLogChunk: 3807707944506604517 MagicDataChunk: 16125544422511774678 MagicSysLogChunk: 7459633284692928063 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885849610 (2025-09-25T16:14:45.849610Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.040938Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:46.056911Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:46.056954Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.061261Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.092963Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2095832 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.119822Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:413} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 1536 Offset# 4294967295 Size# 128 2025-09-25T16:14:46.119840Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:988} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560009518 PDiskId# 1 2025-09-25T16:14:47.278756Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:47.324932Z :BS_PDISK NOTICE: 
{BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 123297764636328089 MagicNextLogChunkReference: 16026277525844445499 MagicLogChunk: 13296663361274856456 MagicDataChunk: 7827819580743324041 MagicSysLogChunk: 9450851957917632843 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816886982182 (2025-09-25T16:14:46.982182Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:47.356904Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:47.377006Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:47.377039Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:47.377375Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:47.429044Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1608402 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:47.619681Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:413} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 1536 Offset# 4294967295 Size# 128 2025-09-25T16:14:47.619698Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:988} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560005934 PDiskId# 1 2025-09-25T16:14:48.871224Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:48.901115Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 8550772135919683171 MagicNextLogChunkReference: 10954377436642133306 MagicLogChunk: 14898179018834531678 MagicDataChunk: 17665619251783807377 MagicSysLogChunk: 5842268438221000431 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816888757581 (2025-09-25T16:14:48.757581Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:48.912917Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read 
Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:48.928080Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:48.928113Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:48.940915Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:48.964896Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1852591 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:50.468991Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:413} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 4608 Offset# 4294967295 Size# 128 2025-09-25T16:14:50.469010Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:988} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560006190 PDiskId# 1 2025-09-25T16:14:52.061229Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:52.074497Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 1353528851672987000 MagicNextLogChunkReference: 8379597845397662593 MagicLogChunk: 10402554679094431075 MagicDataChunk: 9125479147583479035 MagicSysLogChunk: 16676989667838600905 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816891961481 (2025-09-25T16:14:51.961481Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:52.078162Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:52.081282Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:52.081309Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Re ... 
LogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-09-25T16:15:14.946173Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:15.064148Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:15.066227Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:44.641222Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:44.654438Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16278754935350372091 MagicNextLogChunkReference: 13204310696492021926 MagicLogChunk: 8299337852444241066 MagicDataChunk: 15370984225361136458 MagicSysLogChunk: 12121118987238328005 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913292357 (2025-09-25T16:15:13.292357Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:44.956034Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 4404287 NonceLog# 3175403 NonceData# 4245736107} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:44.973747Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:44.980792Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 97 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 397312} PDiskId# 1 2025-09-25T16:15:44.985538Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 397312} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:45.018437Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:45.178679Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:46.276619Z 
:BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:46.285421Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16278754935350372091 MagicNextLogChunkReference: 13204310696492021926 MagicLogChunk: 8299337852444241066 MagicDataChunk: 15370984225361136458 MagicSysLogChunk: 12121118987238328005 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913292357 (2025-09-25T16:15:13.292357Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:46.476433Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 5812736 NonceLog# 4439688 NonceData# 4247065815} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:46.490610Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:47.482603Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 126331 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} PDiskId# 1 2025-09-25T16:15:47.488184Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:47.518147Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:47.520478Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:48.506514Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:48.526940Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16278754935350372091 MagicNextLogChunkReference: 13204310696492021926 MagicLogChunk: 8299337852444241066 MagicDataChunk: 15370984225361136458 MagicSysLogChunk: 12121118987238328005 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913292357 (2025-09-25T16:15:13.292357Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:48.824985Z 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 7898143 NonceLog# 6124851 NonceData# 4248892091} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:48.848912Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:50.039446Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32000 SectorIdx# 119960 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} PDiskId# 1 2025-09-25T16:15:50.045232Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:50.079237Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:50.081531Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:50.298280Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:15:50.316976Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16278754935350372091 MagicNextLogChunkReference: 13204310696492021926 MagicLogChunk: 8299337852444241066 MagicDataChunk: 15370984225361136458 MagicSysLogChunk: 12121118987238328005 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816913292357 (2025-09-25T16:15:13.292357Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:15:50.575580Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 9839388 NonceLog# 7208311 NonceData# 4250635229} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:15:50.593081Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:15:51.410302Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32001 SectorIdx# 18915 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { 
ChunkIdx# 32001 OffsetInChunk# 77475840} PDiskId# 1 2025-09-25T16:15:51.415766Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:15:51.440000Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:15:51.442140Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:15:51.446740Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:413} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 132608 Offset# 4294967295 Size# 128 2025-09-25T16:15:51.446753Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:988} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 32002 ReqId# 2560572462 PDiskId# 1 |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp >> Deadlines::TestBlockMirror3of4 [GOOD] >> Deadlines::TestDiscoverMirror3dc |80.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TBtreeIndexTPartLarge::MiddleKeys1GB [GOOD] >> TBtreeIndexTPartLarge::BigKeys1GB >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::ReplPerf |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] >> Deadlines::TestDiscoverMirror3dc [GOOD] >> Deadlines::TestDiscover4Plus2Block >> test.py::test[table_range-each_with_non_existing_all_fail--Results] >> Deadlines::TestDiscover4Plus2Block [GOOD] >> Deadlines::TestDiscoverMirror3of4 |80.5%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> Deadlines::TestDiscoverMirror3of4 [GOOD] >> Deadlines::TestDiscoverReadBodyMirror3dc |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> test.py::test[insert-drop_sortness-calc-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] >> BlobPatching::StressBlock42 [GOOD] >> BlobPatching::StressNone >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> Deadlines::TestDiscoverReadBodyMirror3dc [GOOD] >> Deadlines::TestDiscoverReadBody4Plus2Block |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] >> test.py::test[blocks-pg_top_sort--ForceBlocks] >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero >> Acceleration::TestAcceleration4Plus2BlockPutAsyncBlob2Slow [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead1Slow >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp >> BlobPatching::StressNone [GOOD] >> BlobPatching::DiffsWithIncorectPatchedBlobPartId >> CodecsTest::LargeAndRepeated [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] >> Deadlines::TestDiscoverReadBody4Plus2Block [GOOD] >> Deadlines::TestDiscoverReadBodyMirror3of4 |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> BlobPatching::DiffsWithIncorectPatchedBlobPartId [GOOD] >> BlobPatching::PatchBlock42 |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> Deadlines::TestDiscoverReadBodyMirror3of4 [GOOD] >> Deadlines::TestHardCollectGarbage4Plus2Block |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] >> test.py::test[pg-select_limit-default.txt-Results] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased >> test.py::test[hor_join-runtime_dep-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 11997592444209265103 >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] >> 
test.py::test[type_v3-append_diff_flags--ForceBlocks] >> TBlobStorageSyncLogKeeper::CutLog_EntryPointNewFormat [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLog [GOOD] >> TBlobStorageSyncLogMem::FilledIn1 [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> test.py::test[join-nested_semi_join-off-Results] |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/services/ydb/ydb_ut.cpp >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-order_of_qualified--ForceBlocks] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead1Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead1Slow >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> VarLengthIntCodec::BasicTest64 [GOOD] >> VarLengthIntCodec::Random32 [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] >> VarLengthIntCodec::Random64 >> TBtreeIndexTPartLarge::BigKeys1GB [GOOD] >> TBtreeIndexTPartLarge::CutKeys >> VarLengthIntCodec::Random64 [GOOD] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> BridgeGet::PartRestorationAcrossBridgeOnDiscover |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt-Results] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead1Slow [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead2Slow |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 >> test.py::test[select-exists_true-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> TPDiskRaces::Decommit [GOOD] >> TPDiskRaces::DecommitWithInflight ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 5630548647955853548 >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> Acceleration::TestAccelerationMirror3dcGetAsyncRead2Slow [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead2Slow >> test.py::test[produce-reduce_lambda_list_mem-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] >> test.py::test[optimizers-group_visit_lambdas--ForceBlocks] [GOOD] >> test.py::test[optimizers-group_visit_lambdas--Results] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/ut/ydb-core-control-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 >> test.py::test[join-full_trivial--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial--Results] >> Deadlines::TestHardCollectGarbage4Plus2Block [GOOD] >> Acceleration::TestAcceleration4Plus2BlockGetAsyncRead2Slow [GOOD] >> Acceleration::TestThresholdPutMirror3dc1Slow >> test.py::test[insert-drop_sortness-calc-ForceBlocks] [GOOD] >> test.py::test[insert-drop_sortness-calc-Results] >> test.py::test[blocks-pg_top_sort--ForceBlocks] [GOOD] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |80.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[blocks-pg_top_sort--Results] >> test.py::test[action-action_nested_query-default.txt-Results] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren--ForceBlocks] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with_legacy--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> Deadlines::TestHardCollectGarbage4Plus2Block [GOOD] Test command err: RandomSeed# 6991963598628638957 2025-09-25T16:15:51.375847Z 10 00h01m40.010512s :BS_PROXY_GET ERROR: [4fe793e8f8c36066] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 2025-09-25T16:15:51.834442Z 9 00h01m40.010512s :BS_PROXY_GET ERROR: [840c687e3165801c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 2025-09-25T16:15:52.355114Z 9 00h01m40.010512s :BS_PROXY_GET ERROR: [871e8f08bc86e6ce] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:1:123:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "Deadline timer hit"} Marker# BPG29 2025-09-25T16:15:54.041529Z 10 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [6ecff229007b7d16] Status# DEADLINE Marker# DSPDM02 2025-09-25T16:15:54.657583Z 9 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [9836c3a20375a5c8] Result# TEvDiscoverResult {Status# DEADLINE BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1 ErrorReason# "Deadline timer hit"} Marker# BSD01 2025-09-25T16:15:55.177467Z 9 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [d4fe4d5193250bff] request failed Status# DEADLINE ErrorReason# FailedGroupDisks# [] Marker# DSPDX02 2025-09-25T16:15:56.041242Z 10 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [54320703deec1599] Status# DEADLINE Marker# DSPDM02 2025-09-25T16:15:56.883071Z 9 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [18905c07c5c7fbfe] Result# TEvDiscoverResult {Status# DEADLINE BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1 ErrorReason# "Deadline timer hit"} Marker# BSD01 2025-09-25T16:15:57.327526Z 9 00h01m40.010512s :BS_PROXY_DISCOVER ERROR: [ba17bd697b4e62d0] request failed Status# DEADLINE ErrorReason# FailedGroupDisks# [] Marker# DSPDX02 >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction >> Acceleration::TestThresholdPutMirror3dc1Slow [GOOD] >> Acceleration::TestThresholdPut4Plus2Block1Slow >> 
CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> test.py::test[type_v3-ignore_v3_hint-protofield-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[file-parse_file_in_select_as_int--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> test.py::test[insert-replace_inferred_op--ForceBlocks] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> test.py::test[type_v3-append_diff_flags--ForceBlocks] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[join-order_of_qualified--Results] |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--Results] >> test.py::test[join-full_trivial--Results] [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[join-full_trivial-off-ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_csee-off-ForceBlocks] [GOOD] >> test.py::test[in-huge_in-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 10591863380823011921 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from 
i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover 
maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge discover maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 
*** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
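The BridgeGet::PartRestorationAcrossBridgeOnDiscover output above is an exhaustive sweep: for each readBody value the test walks every combination of the three per-pile part masks (mask1, mask2, mask3, each ranging over 0..7), performs a bridge discover, and then reads the blob back from each of the three piles (i# 0..2). The first component of the reported maxId advances by one per combination, which is why the non-null ids run from 100501 up to 101523 over 2 x 8 x 8 x 8 = 1024 combinations. A small sketch of that enumeration; the counter base of 100500 is read off the log, and the iteration order (mask3 fastest, readBody slowest) is inferred from how the printed masks change:

# Reproduce the parameter sweep and the final counter value seen above.
from itertools import product

base = 100_500
combos = [(read_body, m1, m2, m3)
          for read_body in (1, 0)                         # readBody# 1 first, then 0
          for m1, m2, m3 in product(range(8), repeat=3)]  # mask3 varies fastest

print(len(combos))              # 1024 combinations in total
print(base + len(combos) - 1)   # 101523 -> matches the last maxId above

The very first combination (all masks zero) reports maxId [0:0:0:0:0:0:0], presumably because no part survives in any pile and the discover finds nothing to restore.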
readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# ... iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge discover maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** 
performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 
mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 
*** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] Test command err: 2025-09-25T16:15:27.409000Z :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:568: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2025-09-25T16:15:28.993683Z :BS_VDISK_OTHER ERROR: vdisk_context.h:143: PDiskId# 1 VDISK[0:_:0:0:0]: (0) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2025-09-25T16:15:28.993718Z :BS_SKELETON ERROR: blobstorage_skeletonfront.cpp:1750: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? 
Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 >> test.py::test[join-anyjoin_common_nodata_keys-off-ForceBlocks] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-pg_call--ForceBlocks] >> test.py::test[join-anyjoin_common_nodata_keys-off-Results] [SKIPPED] >> test.py::test[join-equi_join_three_simple--ForceBlocks] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo_opt--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> test.py::test[file-parse_file_in_select_as_int--Results] [GOOD] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] >> TBsVDiskRepl3::ReplPerf [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> VDiskBalancing::TestStopOneNode_Block42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 10224190870719164810 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-09-25T16:16:14.964530Z 1 00h01m30.044295s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6358:837] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction >> VDiskBalancing::TestRandom_Block42 >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2025-09-25T16:15:43.385350Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# 
LostData SubsequentFailure# 0] 2025-09-25T16:15:43.448812Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15903132205936726835] 2025-09-25T16:15:43.469046Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-09-25T16:15:52.034983Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-09-25T16:15:52.136999Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1899007055748209806] 2025-09-25T16:15:53.200003Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:3:0]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-09-25T16:16:11.325119Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-09-25T16:16:11.500954Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18357951188223193486] 2025-09-25T16:16:11.566050Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] >> Acceleration::TestThresholdPut4Plus2Block1Slow [GOOD] >> Acceleration::TestThresholdPut4Plus2Block2Slow |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 154.2178864 None domains 1 old (ns): 109.6627466 None domains 9 new (ns): 127.3092609 None domains 9 old (ns): 94.69315507 Mirror3 domains 4 new (ns): 113.5589249 Mirror3 domains 4 old (ns): 49.0727212 Mirror3 domains 9 new (ns): 66.67328096 Mirror3 domains 9 old (ns): 45.89024683 4Plus2Block domains 8 new (ns): 49.44670661 4Plus2Block domains 8 old (ns): 73.86129848 4Plus2Block domains 9 new (ns): 103.4102575 4Plus2Block domains 9 old (ns): 86.00969584 ErasureMirror3of4 domains 8 new (ns): 117.8818983 ErasureMirror3of4 domains 8 old (ns): 62.27785184 ErasureMirror3of4 domains 9 new (ns): 87.02926571 ErasureMirror3of4 domains 9 old (ns): 65.15158328 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 9601026413474800815 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-09-25T16:16:16.691365Z 3 00h01m00.010512s :BS_NODE ERROR: 
{NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:304:64] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-09-25T16:16:16.691426Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:309:69] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2025-09-25T16:16:16.691449Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-09-25T16:16:16.691467Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:306:66] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2025-09-25T16:16:16.691493Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-09-25T16:16:16.691521Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-09-25T16:16:16.691541Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 9239245014016734957 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-09-25T16:16:15.633273Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 21 us testing erasure stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 273543 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 
9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 588218 us testing erasure stripe-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 50 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 6500 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 645948 us >> test.py::test[aggregate-group_by_rollup_udf--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_udf--Results] >> test.py::test[column_order-insert_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-insert_tmp-default.txt-Results] >> test.py::test[select-literal_bool-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> test.py::test[join-three_equalities_paren--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren--Results] >> TStateStorage2RingGroups::TestStateStorageReplyOnce >> BlobPatching::PatchBlock42 [GOOD] >> BlobStorageBlockRace::Test |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] >> BlobStorageBlockRace::Test [GOOD] >> BlobStorageBlockRace::BlocksRacingViaSyncLog >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--Results] >> BlobStorageBlockRace::BlocksRacingViaSyncLog [GOOD] >> BlobStorageBlockRace::BlocksRacingViaSyncLog2 >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] >> BlobStorageBlockRace::BlocksRacingViaSyncLog2 [GOOD] >> BlobStorageSync::TestSyncLogCuttingMirror3dc >> TStateStorageRingGroupState::TestStateStorageUpdateSig >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 11838662145732643606 2025-09-25T16:16:19.648139Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 5965925594698579317 2025-09-25T16:16:19.649258Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 2 Cookie# 5965925594698579317 SessionId# [1:130:1] Binding# {6.2/4288977896210761583@[1:142:5]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: 
"\3403\207\365\032>> test.py::test[insert-replace_inferred_op--ForceBlocks] [GOOD] >> test.py::test[insert-replace_inferred_op--Results] >> Acceleration::TestThresholdPut4Plus2Block2Slow [GOOD] >> Acceleration::TestThresholdGetMirror3dc1Slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 16704627542168713990 2025-09-25T16:16:19.891093Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [7:146:1] Cookie# 17686118187729007200 2025-09-25T16:16:19.892723Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 1 Cookie# 17686118187729007200 SessionId# [7:146:1] Binding# {6.0/13425254408822994142@[7:41:6]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.1" Port: 19001 NodeId: 1 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestBoardConfigMismatch >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] |80.6%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q01-default.txt-ForceBlocks] [GOOD] >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp >> test.py::test[pg-tpcds-q01-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 10184792553112220802 2025-09-25T16:16:20.396155Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 16707367545498612149 2025-09-25T16:16:20.396179Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-09-25T16:16:20.397714Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 16707367545498612149 SessionId# [2:131:1] Binding# {1.1/16707367545498612149@[2:131:1]} Record# {RootNodeId: 2 } 2025-09-25T16:16:20.397754Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:366} AbortBinding Binding# {1.1/16707367545498612149@[2:131:1]} Reason# binding cycle 2025-09-25T16:16:20.397765Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:87} AbortAllScatterTasks Binding# {1.1/16707367545498612149@[2:131:1]} 2025-09-25T16:16:20.397780Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} 2025-09-25T16:16:20.397789Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC30@distconf_binding.cpp:162} Delaying bind 2025-09-25T16:16:20.397823Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [2:108:2] Cookie# 3282058737222481097 2025-09-25T16:16:20.397831Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# 
false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-09-25T16:16:20.397875Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 3 Cookie# 3282058737222481097 SessionId# [2:108:2] Binding# Record# {BoundNodes { NodeId { Host: "127.0.0.1" Port: 19001 NodeId: 1 } Meta { Fingerprint: "\3403\207\365\032> Record# {BoundNodes { NodeId { Host: "127.0.0.9" Port: 19001 NodeId: 9 } Meta { Fingerprint: "\3403\207\365\032> 2025-09-25T16:16:20.398195Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:697} UnbindNode NodeId# 2 Reason# explicit unbind request 2025-09-25T16:16:20.398199Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.6:19001/6 2025-09-25T16:16:20.398206Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.7:19001/7 2025-09-25T16:16:20.398211Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.9:19001/9 2025-09-25T16:16:20.398216Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.4:19001/4 2025-09-25T16:16:20.398221Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.8:19001/8 2025-09-25T16:16:20.398226Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.5:19001/5 2025-09-25T16:16:20.398231Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.2:19001/2 2025-09-25T16:16:20.398236Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.3:19001/3 2025-09-25T16:16:20.398244Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 2 Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} 2025-09-25T16:16:20.398250Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-09-25T16:16:20.398257Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 5 Binding# {5.0/12819197808515352321@[0:0:0]} SessionId# [0:0:0] 2025-09-25T16:16:20.398266Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 3282058737222481097 2025-09-25T16:16:20.398272Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-09-25T16:16:20.398281Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 3282058737222481097 SessionId# [3:109:2] Binding# {2.1/3282058737222481097@[3:109:2]} Record# {RootNodeId: 2 } 2025-09-25T16:16:20.398294Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 6067551617502378051 2025-09-25T16:16:20.398300Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] 
SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-09-25T16:16:20.398307Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 6067551617502378051 SessionId# [4:112:2] Binding# {2.1/6067551617502378051@[4:112:2]} Record# {RootNodeId: 2 } 2025-09-25T16:16:20.398583Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 12819197808515352320 2025-09-25T16:16:20.398591Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-09-25T16:16:20.398596Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:688} TEvNodeConfigUnbind NodeId# 1 Cookie# 12819197808515352320 SessionId# [5:140:1] Binding# {3.1/15219154400276931870@[5:92:3]} 2025-09-25T16:16:20.398600Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:697} UnbindNode NodeId# 1 Re ... -09-25T16:16:20.409427Z 1 00h00m00.369402s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:20.409432Z 1 00h00m00.369402s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:20.409439Z 1 00h00m00.369402s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:20.409444Z 1 00h00m00.369402s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:20.409449Z 1 00h00m00.369402s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:20.409455Z 1 00h00m00.369402s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409460Z 1 00h00m00.369402s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409465Z 1 00h00m00.369402s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409470Z 1 00h00m00.369402s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409474Z 1 00h00m00.369402s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409510Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:20.409518Z 1 00h00m00.772490s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:20.409524Z 1 00h00m00.772490s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:20.409529Z 1 00h00m00.772490s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:20.409533Z 1 00h00m00.772490s :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:20.409538Z 1 00h00m00.772490s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:20.409544Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409551Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409558Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409563Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409568Z 1 00h00m00.772490s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.409909Z 2 00h00m01.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 65538 Sender# [0:0:0] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:16:20.410038Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:20.410069Z 1 00h00m01.602851s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:20.410076Z 1 00h00m01.602851s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:20.410081Z 1 00h00m01.602851s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:20.410087Z 1 00h00m01.602851s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:20.410092Z 1 00h00m01.602851s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:20.410097Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410103Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410108Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410112Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410116Z 1 00h00m01.602851s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410631Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 
2025-09-25T16:16:20.410646Z 1 00h00m03.363216s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:20.410653Z 1 00h00m03.363216s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:20.410658Z 1 00h00m03.363216s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:20.410663Z 1 00h00m03.363216s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:20.410668Z 1 00h00m03.363216s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:20.410674Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410683Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410688Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410693Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.410698Z 1 00h00m03.363216s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.411690Z 1 00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:20.411705Z 1 00h00m07.165604s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:20.411710Z 1 00h00m07.165604s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:20.411715Z 1 00h00m07.165604s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:20.411719Z 1 00h00m07.165604s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:20.411723Z 1 00h00m07.165604s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:20.411729Z 1 00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.411735Z 1 00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.411739Z 1 00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.411743Z 1 00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.411748Z 1 
00h00m07.165604s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:20.412619Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-09-25T16:16:20.412637Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:16:20.412645Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-09-25T16:16:20.412650Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-09-25T16:16:20.412654Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-09-25T16:16:20.412659Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-09-25T16:16:20.412670Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:16:20.412680Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:16:20.412690Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:16:20.412698Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig undelivered ringGroup:0 for: 3 2025-09-25T16:16:20.412705Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} >> test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q28-default.txt-Results] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect--ForceBlocks] >> TStateStorageRingGroupState::TestStateStorageDoubleReply >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] >> test.py::test[join-lookupjoin_inner_2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_2o--Results] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 4447372992888105710 2025-09-25T16:16:21.144143Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 9844128374080421154 2025-09-25T16:16:21.144172Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} 
SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:21.145801Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 9844128374080421154 SessionId# [7:146:1] Binding# {1.1/9844128374080421154@[7:146:1]} Record# {RootNodeId: 5 } 2025-09-25T16:16:21.145856Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 3208409264092598657 2025-09-25T16:16:21.145867Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-09-25T16:16:21.145881Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 3208409264092598657 SessionId# [2:131:1] Binding# {1.1/3208409264092598657@[2:131:1]} Record# {RootNodeId: 5 } 2025-09-25T16:16:21.145894Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 17778460121789295526 2025-09-25T16:16:21.145901Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:21.145910Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 17778460121789295526 SessionId# [3:134:1] Binding# {1.1/17778460121789295526@[3:134:1]} Record# {RootNodeId: 5 } 2025-09-25T16:16:21.145926Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [4:70:4] Cookie# 11996760312115883516 2025-09-25T16:16:21.145933Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [4:70:4] Inserted# false Subscription# {SessionId# [4:70:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-09-25T16:16:21.145979Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 5 Cookie# 11996760312115883516 SessionId# [4:70:4] Binding# {3.1/14879208871440865026@[4:89:3]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> Acceleration::TestThresholdGetMirror3dc1Slow [GOOD] >> Acceleration::TestThresholdGet4Plus2Block1Slow |80.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite >> test.py::test[produce-reduce_multi_in_sampling-sorted-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 10365262671902940907 2025-09-25T16:16:21.728929Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 6005944673938584433 2025-09-25T16:16:21.728954Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-09-25T16:16:21.728963Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:688} TEvNodeConfigUnbind NodeId# 1 Cookie# 6005944673938584433 SessionId# [6:143:1] Binding# 2025-09-25T16:16:21.728981Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:697} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-09-25T16:16:21.728988Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-09-25T16:16:21.728995Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-09-25T16:16:21.729001Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-09-25T16:16:21.729006Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-09-25T16:16:21.729012Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-09-25T16:16:21.729017Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-09-25T16:16:21.729023Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-09-25T16:16:21.729029Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-09-25T16:16:21.729039Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} 2025-09-25T16:16:21.729048Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-09-25T16:16:21.729058Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 5 Binding# {5.0/15039642668819396575@[0:0:0]} SessionId# [0:0:0] 2025-09-25T16:16:21.729074Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# 
[7:146:1] Cookie# 13218742797656716090 2025-09-25T16:16:21.729081Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:21.730591Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 13218742797656716090 SessionId# [7:146:1] Binding# {1.6/13218742797656716090@[7:146:1]} Record# {RootNodeId: 1 } 2025-09-25T16:16:21.730629Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 8077690989591376365 2025-09-25T16:16:21.730638Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:21.730655Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 8077690989591376365 SessionId# [2:131:1] Binding# {1.6/8077690989591376365@[2:131:1]} Record# {RootNodeId: 1 } 2025-09-25T16:16:21.730741Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [8:269:20] SessionId# [9:19:8] Cookie# 246191152637820467 2025-09-25T16:16:21.730751Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 8 SessionId# [9:19:8] Inserted# false Subscription# {SessionId# [9:19:8] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:21.730762Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 8 Cookie# 246191152637820467 SessionId# [9:19:8] Binding# {8.5/246191152637820467@[9:19:8]} Record# {RootNodeId: 1 } 2025-09-25T16:16:21.730774Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [6:255:20] SessionId# [3:94:5] Cookie# 15039642668819396574 2025-09-25T16:16:21.730781Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [3:94:5] Inserted# false Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:21.730789Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:688} TEvNodeConfigUnbind NodeId# 6 Cookie# 15039642668819396574 SessionId# [3:94:5] Binding# {2.6/16764266259571542326@[3:109:2]} 2025-09-25T16:16:21.730795Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:697} UnbindNode NodeId# 6 Reason# explicit unbind request 2025-09-25T16:16:21.730801Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.6:19001/6 2025-09-25T16:16:21.730810Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.7:19001/7 2025-09-25T16:16:21.730816Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.9:19001/9 2025-09-25T16:16:21.730822Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.4:19001/4 2025-09-25T16:16:21.730827Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.8:19001/8 2025-09-25T16:16:21.730833Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} 
DeleteBound RefererNodeId# 6 NodeId# 127.0.0.1:19001/1 2025-09-25T16:16:21.730839Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.5:19001/5 2025-09-25T16:16:21.730845Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.2:19001/2 2025-09-25T16:16:21.730850Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 6 NodeId# 127.0.0.3:19001/3 2025-09-25T16:16:21.730860Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 6 Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} 2025-09-25T16:16:21.730874Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [1:142:5] Cookie# 6005944673938584433 2025-09-25T16:16:21.730881Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [1:142:5] Inserted# false Subscription# {SessionId# [1:142:5] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-09-25T16:16:21.730890Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 6005944673938584433 SessionId# [1:142:5] Binding# {6.0/6005944673938584434@[1:142:5]} Record# {RootNodeId: 6 } 2025-09-25T16:16:21.730903Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [8:269:20] SessionId# [9:19:8] Cookie# 246191152637820467 2025-09-25T16:16:21.730909Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 8 SessionId# [9:19:8] Inserted# false Subscription# {SessionId# [9:19:8] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:21.730919Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 8 Cookie# 246191152637820467 SessionId# [9:19:8] Binding# {8.1/246191152637820467@[9:19:8]} Record# {RootNodeId: 6 } 2025-09-25T16:16:21.730930Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [2:108:2] Cookie# 16764266259571542326 2025-09-25T16:16:21.730937Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:21.730979Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 3 Cookie# 16764266259571542326 SessionId# [2:108:2] Binding# {1.1/8077690989591376365@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.9" Port: 19001 NodeId: 9 } Meta { Fingerprint: "\3403\207\365\032>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TMonitoringTests::InvalidActorId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 10613148012801018984 2025-09-25T16:16:22.495937Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 3720891694602033690 2025-09-25T16:16:22.495969Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} 
NextSubscribeCookie# 7 2025-09-25T16:16:22.497379Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 3720891694602033690 SessionId# [1:133:2] Binding# {3.3/3720891694602033690@[1:133:2]} Record# {RootNodeId: 1 } 2025-09-25T16:16:22.497417Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:366} AbortBinding Binding# {3.3/3720891694602033690@[1:133:2]} Reason# binding cycle 2025-09-25T16:16:22.497427Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:87} AbortAllScatterTasks Binding# {3.3/3720891694602033690@[1:133:2]} 2025-09-25T16:16:22.497445Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 3 Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} 2025-09-25T16:16:22.497478Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [8:101:3] Cookie# 7416065818303555210 2025-09-25T16:16:22.497486Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:22.497510Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 7416065818303555210 SessionId# [8:101:3] Binding# {3.3/7416065818303555210@[8:101:3]} Record# {RootNodeId: 1 } 2025-09-25T16:16:22.497522Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [9:104:3] Cookie# 7901742337921929980 2025-09-25T16:16:22.497527Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [9:104:3] Inserted# false Subscription# {SessionId# [9:104:3] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:22.497534Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 7901742337921929980 SessionId# [9:104:3] Binding# {3.3/7901742337921929980@[9:104:3]} Record# {RootNodeId: 1 } 2025-09-25T16:16:22.497546Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 15126384716452238718 2025-09-25T16:16:22.497552Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:22.497562Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 15126384716452238718 SessionId# [4:89:3] Binding# {3.3/15126384716452238718@[4:89:3]} Record# {RootNodeId: 1 } 2025-09-25T16:16:22.497573Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 3720891694602033690 2025-09-25T16:16:22.497578Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 9 2025-09-25T16:16:22.497620Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:536} TEvNodeConfigPush NodeId# 1 Cookie# 3720891694602033690 SessionId# [3:134:1] Binding# Record# {BoundNodes { NodeId { Host: "127.0.0.9" Port: 19001 NodeId: 9 } Meta { Fingerprint: 
"\3403\207\365\032> Record# {RootNodeId: 3 } 2025-09-25T16:16:22.497845Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [8:101:3] Cookie# 7416065818303555210 2025-09-25T16:16:22.497867Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:22.497876Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 7416065818303555210 SessionId# [8:101:3] Binding# {3.1/7416065818303555210@[8:101:3]} Record# {RootNodeId: 3 } 2025-09-25T16:16:22.497888Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [9:104:3] Cookie# 7901742337921929980 2025-09-25T16:16:22.497894Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [9:104:3] Inserted# false Subscription# {SessionId# [9:104:3] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-09-25T16:16:22.497902Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 7901742337921929980 SessionId# [9:104:3] Binding# {3.1/7901742337921929980@[9:104:3]} Record# {RootNodeId: 3 } 2025-09-25T16:16:22.497912Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 15126384716452238718 2025-09-25T16:16:22.497918Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-09-25T16:16:22.497925Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 15126384716452238718 SessionId# [4:89:3] Binding# {3.1/15126384716452238718@[4:89:3]} Record# {RootNodeId: 3 } 2025-09-25T16:16:22.498015Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 3720891694602033690 2025-09-25T16:16:22.498021Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 9 2025-09-25T16:16:22.498026Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:688} TEvNodeConfigUnbind NodeId# 1 Cookie# 3720891694602033690 SessionId# [3:134:1] Binding# 2025-09-25T16:16:22.498030Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:697} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-09-25T16:16:22.498035Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-09-25T16:16:22.498041Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-09-25T16:16:22.498046Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-09-25T16:16:22.498068Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-09-25T16:16:22.498073Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:498} 
Delete ... 6:16:22.510492Z 1 00h00m00.193423s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510497Z 1 00h00m00.193423s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510526Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.510534Z 1 00h00m00.418455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:22.510539Z 1 00h00m00.418455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:22.510545Z 1 00h00m00.418455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:22.510551Z 1 00h00m00.418455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:22.510578Z 1 00h00m00.418455s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:22.510584Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510590Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510595Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510601Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510606Z 1 00h00m00.418455s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510640Z 1 00h00m00.886521s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.510647Z 1 00h00m00.886521s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:22.510651Z 1 00h00m00.886521s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:22.510656Z 1 00h00m00.886521s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:22.510661Z 1 00h00m00.886521s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:22.510665Z 1 00h00m00.886521s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:22.510671Z 1 00h00m00.886521s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510676Z 1 00h00m00.886521s :STATESTORAGE DEBUG: 
ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510681Z 1 00h00m00.886521s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510687Z 1 00h00m00.886521s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.510693Z 1 00h00m00.886521s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511042Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.511064Z 1 00h00m01.869459s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:22.511071Z 1 00h00m01.869459s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:22.511077Z 1 00h00m01.869459s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:22.511083Z 1 00h00m01.869459s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:22.511088Z 1 00h00m01.869459s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:22.511115Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511122Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511127Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511132Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511137Z 1 00h00m01.869459s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511626Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.511640Z 1 00h00m03.913970s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:22.511646Z 1 00h00m03.913970s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:22.511651Z 1 00h00m03.913970s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:22.511655Z 1 00h00m03.913970s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:22.511660Z 1 00h00m03.913970s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:22.511666Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511672Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511677Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511681Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.511686Z 1 00h00m03.913970s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.512695Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.512710Z 1 00h00m08.330113s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:16:22.512715Z 1 00h00m08.330113s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:16:22.512720Z 1 00h00m08.330113s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:16:22.512725Z 1 00h00m08.330113s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-09-25T16:16:22.512730Z 1 00h00m08.330113s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-09-25T16:16:22.512735Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.512761Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.512766Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.512771Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.512777Z 1 00h00m08.330113s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:16:22.515702Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:16:22.515724Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:16:22.515735Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 
2025-09-25T16:16:22.515740Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-09-25T16:16:22.515747Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-09-25T16:16:22.515752Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-09-25T16:16:22.515763Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:16:22.515774Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:16:22.515781Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] >> test.py::test[join-full_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[insert-replace_inferred_op--Results] [GOOD] >> test.py::test[join-count_bans--ForceBlocks] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] >> TMonitoringTests::InvalidActorId [GOOD] >> test.py::test[join-group_compact_by--ForceBlocks] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] >> test.py::test[view-init_view_after_eval-default.txt-ForceBlocks] [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test.py::test[column_order-insert_tmp-default.txt-Results] [GOOD] |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] >> TPDiskRaces::DecommitWithInflight [GOOD] >> TPDiskRaces::DecommitWithInflightMock >> TExternalDataSourceTest::DropTableTwice |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> test.py::test[produce-reduce_multi_in--Results] [GOOD] |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> test.py::test[blocks-pg_call--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_call--Results] >> test.py::test[pg-tpcds-q01-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:132:2058] recipient: [1:114:2145] 2025-09-25T16:16:23.371049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:23.371082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:23.371088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:23.371093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:23.371100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:23.371104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:23.371114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:23.371128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:23.371251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:23.371326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:23.400346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:23.400390Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:23.400502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:23.425631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:23.428225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:23.428288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:23.442219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-09-25T16:16:23.442323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:23.442460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:23.442554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:23.443099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:23.443157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:23.443485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:23.443497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:23.443506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:23.443515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:23.443521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:23.443541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.445191Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:16:23.469569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:23.469685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.469763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:23.469773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:23.469820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:23.469837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:23.471005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:23.471071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:23.471136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.471148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:23.471155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:23.471162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:23.471690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.471703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:23.471709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:23.472083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.472097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.472104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:23.472113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:23.472851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:23.473486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:23.473549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-09-25T16:16:23.473792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:23.473825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:23.473848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:23.473923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:23.473932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:23.473968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:23.473981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:23.474442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... MESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:23.487919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:35: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-09-25T16:16:23.487945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-09-25T16:16:23.487978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:23.487988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:23.488341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:16:23.488430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:16:23.488743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:23.488752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:23.488792Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:16:23.488806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:16:23.488839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:23.488846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-09-25T16:16:23.488851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:16:23.488855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:16:23.488946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:16:23.488955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:16:23.488970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:16:23.488975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:23.488981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:16:23.488985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:23.488989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:16:23.488995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:23.489001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:16:23.489005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:16:23.489018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:16:23.489026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:16:23.489030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-09-25T16:16:23.489037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 
102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-09-25T16:16:23.489182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:23.489194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:23.489199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:16:23.489204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-09-25T16:16:23.489209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:16:23.489315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:23.489324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:23.489329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:16:23.489333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:16:23.489337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:23.489345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:16:23.492468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:16:23.492571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:16:23.492631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:16:23.492640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:16:23.492730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 
72057594046678944 2025-09-25T16:16:23.492758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:16:23.492764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:337:2327] TestWaitNotification: OK eventTxId 102 2025-09-25T16:16:23.492873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:23.492925Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 67us result status StatusSuccess 2025-09-25T16:16:23.493029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[join-equi_join_three_simple--ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] |80.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] [GOOD] >> BlobStorageSync::TestSyncLogCuttingMirror3dc [GOOD] >> BlobStorageSync::TestSyncLogCuttingMirror3of4 >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TExternalDataSourceTest::ReadOnlyMode >> 
test.py::test[union_all-union_all_subexpr-default.txt-Results] >> TExternalDataSourceTest::CreateExternalDataSource |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> test.py::test[blocks-string_as_agg_key--ForceBlocks] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-ForceBlocks] [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> Acceleration::TestThresholdGet4Plus2Block1Slow [GOOD] >> Acceleration::TestThresholdGetMirror3dc2Slow >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] >> test.py::test[join-lookupjoin_inner_2o--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-09-25T16:16:24.375055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:24.375080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:24.375086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:24.375091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:24.375098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:24.375102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:24.375112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:24.375124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:24.375232Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:24.375341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:24.415776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:24.415815Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:24.415909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:24.418889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:24.419070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:24.419097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:24.420334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:24.420489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:24.420603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:24.420699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:24.421110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:24.421159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:24.421416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:24.421426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:24.421459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:24.421467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:24.421473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:24.421490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.422786Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:24.443953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:24.444049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.444109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:24.444116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:24.444158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:24.444171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:24.445052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:24.445114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:24.445166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.445176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:24.445181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:24.445187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:24.445706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.445718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:24.445724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:24.448778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.448794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:24.448801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:24.448809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:24.449526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:24.449980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:24.450032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:24.450251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:24.450277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:24.450299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:24.450368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:24.450376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:24.450421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:24.450434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:16:24.450962Z node 1 :FLAT_TX_SCHEMESHARD ... 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:24.747988Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748003Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 18us result status StatusSuccess 2025-09-25T16:16:24.748055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 
MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748144Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748157Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 14us result status StatusSuccess 2025-09-25T16:16:24.748259Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748327Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-09-25T16:16:24.748344Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 17us result status StatusSuccess 2025-09-25T16:16:24.748397Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748443Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:24.748456Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 14us result status StatusSuccess 2025-09-25T16:16:24.748502Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[aggregate-group_by_rollup_udf--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> test.py::test[flatten_by-flatten_and_where--ForceBlocks] [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> test.py::test[flatten_by-flatten_and_where--Results] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[result_types-pg-default.txt-ForceBlocks] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-09-25T16:16:25.206477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:25.206500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:25.206505Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:25.206509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:25.206515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:25.206519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:25.206528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:25.206540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:25.206639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:25.206704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:25.228996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:25.229027Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:25.229127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:25.236931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:25.239483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:25.239528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:25.241701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:25.241955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:25.242087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.242203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:25.242775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:25.242823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:25.243091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:25.243103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-09-25T16:16:25.243139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:25.243147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:25.243154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:25.243171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.244582Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:25.269090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:25.269192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.269258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:25.269267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:25.269309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:25.269324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:25.270201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.270262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:25.270316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.270326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:25.270332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:25.270338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:25.273913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.273936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:25.273944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:25.274402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.274415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.274422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.274431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:25.275181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:25.279952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:25.280067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:25.280427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.280493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:25.280523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.280659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:25.280672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.280739Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:25.280757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:16:25.282354Z node 1 :FLAT_TX_SCHEMESHARD ... : DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:16:25.996605Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.996620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.996625Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:16:25.996631Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:16:25.996636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:16:25.996859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.996876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.996882Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:16:25.996887Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:16:25.996892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:25.996905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:16:25.997198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:16:25.997584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2025-09-25T16:16:25.997607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:16:25.997668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:16:25.997678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:16:25.997769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:16:25.997789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:16:25.997795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:340:2330] TestWaitNotification: OK eventTxId 101 2025-09-25T16:16:25.997886Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:25.997935Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 63us result status StatusSuccess 2025-09-25T16:16:25.998044Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: 
"/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-09-25T16:16:25.998978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:25.999024Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-09-25T16:16:25.999042Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-09-25T16:16:25.999580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:25.999633Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-09-25T16:16:25.999703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:16:25.999711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:16:25.999793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:16:25.999811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:16:25.999816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:348:2338] TestWaitNotification: OK eventTxId 103 2025-09-25T16:16:25.999889Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:25.999927Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 45us result status StatusSuccess 2025-09-25T16:16:26.000024Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 
PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-09-25T16:16:25.410021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:25.410046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:25.410052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:25.410058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:25.410065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:25.410091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 
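The StatusSchemeError a few lines above ("Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable") is the schemeshard's dependency check on drop: an external data source cannot be removed while an external table still references it, and the follow-up describe of /MyRoot/ExternalDataSource accordingly still lists the table under References. Below is a minimal sketch of the same sequence expressed as YQL DDL through the ydb Python SDK; the endpoint, database path, and the external-table options (notably FORMAT) are placeholders and assumptions, since the unit test builds these objects through raw scheme operations rather than YQL.

import ydb  # ydb-python-sdk; assumes a cluster with external data sources enabled (cf. AllExternalDataSourcesAreAvailable in the log)

# Placeholder connection settings, not taken from the log.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.QuerySessionPool(driver)

# Mirror the objects seen above: an ObjectStorage source and a table referencing it.
pool.execute_with_retries("""
    CREATE EXTERNAL DATA SOURCE ExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://s3.cloud.net/my_bucket",
        AUTH_METHOD = "NONE"
    );
""")
pool.execute_with_retries("""
    CREATE EXTERNAL TABLE ExternalTable (
        key Uint64
    ) WITH (
        DATA_SOURCE = "ExternalDataSource",
        LOCATION = "/",
        FORMAT = "json_as_string"  -- assumed option; the unit test sets no format via YQL
    );
""")

# Dropping the source first is rejected, matching the StatusSchemeError in the log.
try:
    pool.execute_with_retries("DROP EXTERNAL DATA SOURCE ExternalDataSource;")
except ydb.Error as error:
    print("drop rejected while the table still references the source:", error)

# Dropping in dependency order succeeds: table first, then the source.
pool.execute_with_retries("DROP EXTERNAL TABLE ExternalTable;")
pool.execute_with_retries("DROP EXTERNAL DATA SOURCE ExternalDataSource;")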
10000 2025-09-25T16:16:25.410102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:25.410115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:25.410215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:25.410276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:25.439586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:25.439631Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:25.439746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:25.444457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:25.444671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:25.444713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:25.446221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:25.446385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:25.446518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.446624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:25.447046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:25.447097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:25.447364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:25.447378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:25.447417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:25.447426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:25.447433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 
2025-09-25T16:16:25.447453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.448915Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:25.493691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:25.493785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.493858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:25.493867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:25.493929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:25.493945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:25.494863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.494921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:25.494983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.494995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:25.495001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:25.495007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:25.495510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.495524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:25.495530Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:25.495881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.495892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:25.495898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.495906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:25.496617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:25.497045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:25.497090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:25.497320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:25.497350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:25.497374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.497449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:25.497456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:25.497502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:25.497515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:16:25.497966Z node 1 :FLAT_TX_SCHEMESHARD ... 
678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:16:25.785153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:16:25.785441Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.785457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:25.785462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:16:25.785467Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:16:25.785472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:25.785485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:16:25.785922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:16:25.786167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:16:25.786223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:16:25.786231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:16:25.786291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:16:25.786310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:16:25.786315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:310:2300] TestWaitNotification: OK eventTxId 101 2025-09-25T16:16:25.786387Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:25.786424Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 49us result status StatusSuccess 
2025-09-25T16:16:25.786536Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-09-25T16:16:25.787469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:25.787518Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-09-25T16:16:25.787531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-09-25T16:16:25.787555Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, 
LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, at schemeshard: 72057594046678944 2025-09-25T16:16:25.788021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-09-25T16:16:25.788071Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:16:25.788130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:16:25.788136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:16:25.788192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:16:25.788210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:16:25.788215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:318:2308] TestWaitNotification: OK eventTxId 102 2025-09-25T16:16:25.788276Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:25.788302Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 31us result status StatusSuccess 2025-09-25T16:16:25.788385Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
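TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists comes down to the StatusAlreadyExists just above: a second ESchemeOpCreateExternalDataSource for /MyRoot/MyExternalDataSource without ReplaceIfExists is refused, and the describe that continues below still reports the original my_bucket LOCATION. The short sketch below reproduces the same behaviour in YQL via the ydb Python SDK with placeholder connection settings; whether CREATE OR REPLACE is accepted for external data sources depends on the EnableReplaceIfExistsForExternalEntities flag the test enables, so treat the last statement as an assumption.

import ydb  # placeholder connection, as in the earlier sketch

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.QuerySessionPool(driver)

create_stmt = """
    CREATE {or_replace} EXTERNAL DATA SOURCE MyExternalDataSource WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "{location}",
        AUTH_METHOD = "NONE"
    );
"""

# First creation succeeds.
pool.execute_with_retries(create_stmt.format(or_replace="", location="https://s3.cloud.net/my_bucket"))

# A plain second CREATE with a different location is rejected (StatusAlreadyExists);
# the stored definition keeps the original LOCATION, as the describe output shows.
try:
    pool.execute_with_retries(create_stmt.format(or_replace="", location="https://s3.cloud.net/my_new_bucket"))
except ydb.Error as error:
    print("second CREATE rejected, existing definition kept:", error)

# Overwriting the definition requires the replace path (ReplaceIfExists on the scheme op,
# CREATE OR REPLACE in YQL) -- assumed to be enabled on this server build.
pool.execute_with_retries(create_stmt.format(or_replace="OR REPLACE", location="https://s3.cloud.net/my_new_bucket"))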
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> Acceleration::TestThresholdGetMirror3dc2Slow [GOOD] >> Acceleration::TestThresholdGet4Plus2Block2Slow >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[blocks-pg_to_interval--ForceBlocks] >> test.py::test[aggregate-group_by_session_compact--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:16:26.445346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:26.445370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:26.445376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:26.445382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:26.445388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:26.445392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2025-09-25T16:16:26.445402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:26.445414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:26.445523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:26.445587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:26.461765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:16:26.461792Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:26.466177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:26.466257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:26.466280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:26.467847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:26.467904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:26.467979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.468029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:26.468378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:26.468415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:26.468607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:26.468614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:26.468629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:26.468634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:26.468639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:26.468660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.469711Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:16:26.488122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:26.488219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.488282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:26.488290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:26.488351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:26.488365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:26.489163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.489230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:26.489281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.489292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:26.489298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:26.489303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:26.489754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.489767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:26.489775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:26.490115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-09-25T16:16:26.490126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.490131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.490138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:26.490783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:26.491138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:26.491186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:26.491381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.491480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:26.491487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.491512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:26.491522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:16:26.492139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:26.492148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:16:26.496491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:16:26.496495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:16:26.496499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:16:26.496503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-09-25T16:16:26.496508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:16:26.496512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:16:26.496516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:16:26.496526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:26.496531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-09-25T16:16:26.496535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-09-25T16:16:26.496539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-09-25T16:16:26.496651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:26.496662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:26.496669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:16:26.496674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-09-25T16:16:26.496678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:16:26.496795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:26.496806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 
Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:16:26.496836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:16:26.496841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:16:26.496845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:16:26.496854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:16:26.497461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:16:26.497676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:16:26.497725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:16:26.497733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:16:26.497785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:16:26.497800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:16:26.497805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:305:2295] TestWaitNotification: OK eventTxId 101 2025-09-25T16:16:26.497877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:26.497904Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 34us result status StatusSuccess 2025-09-25T16:16:26.497998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-09-25T16:16:26.498927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:26.498980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-09-25T16:16:26.498993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:204: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-09-25T16:16:26.499017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:95, at schemeshard: 72057594046678944 2025-09-25T16:16:26.499471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:95" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-09-25T16:16:26.499522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , 
status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:95, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:16:26.499577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:16:26.499583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:16:26.499632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:16:26.499649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:16:26.499654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:313:2303] TestWaitNotification: OK eventTxId 102 >> test.py::test[pg-tpcds-q40-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_1-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] >> test.py::test[pg-tpcds-q40-default.txt-Results] >> test.py::test[tpch-q5-default.txt-ForceBlocks] >> test.py::test[select-sampleselect--ForceBlocks] [GOOD] >> test.py::test[select-sampleselect--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:132:2058] recipient: [1:114:2145] 2025-09-25T16:16:26.450103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:26.450133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:26.450139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:26.450144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:26.450151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:26.450155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:26.450164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:26.450174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:26.450248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:26.450292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:26.467626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:26.467648Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:26.467729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:26.469738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:26.469925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:26.469944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:26.471274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:26.471321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:26.471427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.471491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:26.471837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:26.471879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:26.472086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:26.472096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:26.472102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:26.472107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:26.472111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:26.472123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.473163Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:16:26.489220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:26.489290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.489332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:26.489339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:26.489372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:26.489384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:26.490598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.490638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:26.490671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.490678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:26.490681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:26.490684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:26.491040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:26.491328Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:26.491339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.491343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:26.491759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:26.492089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:26.492133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:26.492288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.492312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:26.492326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.492377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:26.492383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:26.492404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:26.492414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:26.492768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... 
316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-09-25T16:16:26.758459Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:26.758474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936753 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:26.758480Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-09-25T16:16:26.758498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:26.758507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-09-25T16:16:26.758525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:26.758532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:16:26.758736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:16:26.758800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:16:26.759069Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:26.759074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:26.759092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:16:26.759107Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:26.759110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-09-25T16:16:26.759113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:16:26.759120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-09-25T16:16:26.759125Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:16:26.759133Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:16:26.759136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:26.759140Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:16:26.759142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:26.759145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:16:26.759148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:16:26.759152Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:16:26.759154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:16:26.759165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:26.759169Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:16:26.759171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-09-25T16:16:26.759174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-09-25T16:16:26.759259Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:26.759271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:26.759276Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:16:26.759280Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-09-25T16:16:26.759284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:16:26.759335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2025-09-25T16:16:26.759341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:16:26.759349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:16:26.759390Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:26.759398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:16:26.759402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:16:26.759406Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-09-25T16:16:26.759410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:26.759418Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:16:26.769210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:16:26.769255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:16:26.769265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:16:26.769318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:16:26.769324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:16:26.769407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:16:26.769430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:16:26.769435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:338:2328] TestWaitNotification: OK eventTxId 102 2025-09-25T16:16:26.769520Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:26.769556Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 54us result status StatusPathDoesNotExist 2025-09-25T16:16:26.769601Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--ForceBlocks] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |80.6%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists >> YdbProxy::DropTable >> BlobStorageSync::TestSyncLogCuttingMirror3of4 [GOOD] >> BlobStorageSync::TestSyncLogCuttingBlock4Plus2 >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::CreateTopic >> YdbProxy::RemoveDirectory ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:132:2058] recipient: [1:114:2145] 2025-09-25T16:16:28.464109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:28.464146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:28.464154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:28.464159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:28.464166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:28.464171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:28.464182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:28.464200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:28.464335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:28.464414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:28.491344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:28.491391Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:28.491513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:28.495037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:28.495319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:28.495363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:28.496938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:28.497015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:28.497145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.497234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:28.497710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:28.497768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:28.498119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:28.498139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:28.498151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:28.498162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:28.498170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:28.498193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.499850Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:16:28.531599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:28.531737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.531816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:28.531825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:28.531871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:28.531890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:28.534569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.534649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:28.534731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.534748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:28.534755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:28.534762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:28.535618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.535636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:28.535643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:28.536058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.536070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.536076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.536086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:28.536914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:28.537397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:28.537459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:28.537709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.537744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:28.537767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.537847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:28.537856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.537895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:28.537908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:28.538397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... chemeshard: 72057594046678944 2025-09-25T16:16:28.900723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 125, at 
schemeshard: 72057594046678944 2025-09-25T16:16:28.900858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-09-25T16:16:28.900982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.900993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.900995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-09-25T16:16:28.901023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests 
-- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-09-25T16:16:28.901049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [2:400:2390] 2025-09-25T16:16:28.901131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.901133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [2:400:2390] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2025-09-25T16:16:28.901433Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:28.901484Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 67us result status StatusSuccess 2025-09-25T16:16:28.901560Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:132:2058] recipient: [1:114:2145] 2025-09-25T16:16:28.877663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:28.877699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-09-25T16:16:28.877707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:28.877713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:28.877720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:28.877725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:28.877735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:28.877750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:28.877879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:28.877944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:28.904875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:28.904917Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:28.905038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:28.913648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:28.921530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:28.921602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:28.942422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:28.942502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:28.942614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.942675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:28.943143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:28.943182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:28.943417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:28.943423Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:28.943430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:28.943435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:28.943440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:28.943457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.944677Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2156] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:16:28.960397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:28.960487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.960542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:28.960549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:28.960588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:28.960599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:28.961564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.961624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:28.961672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.961679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:28.961683Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:28.961687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:28.962121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.962132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:28.962138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:28.962563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.962574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.962578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.962583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:28.963086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:28.963418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:28.963461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:28.963626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.963647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:28.963664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.963719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:16:28.963726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:28.963750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:28.963760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:28.964150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... 4046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-09-25T16:16:28.995372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:28.995391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:16:28.995399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-09-25T16:16:28.995418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:28.995432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-09-25T16:16:28.995455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:28.995462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:16:28.995591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:16:28.995912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-09-25T16:16:28.996149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:28.996157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:28.996179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:16:28.996198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:28.996203Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-09-25T16:16:28.996208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-09-25T16:16:28.996248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-09-25T16:16:28.996255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-09-25T16:16:28.996269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:16:28.996273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:16:28.996279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:16:28.996282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:16:28.996289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-09-25T16:16:28.996295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:16:28.996300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-09-25T16:16:28.996303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 104:0 2025-09-25T16:16:28.996316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:16:28.996321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-09-25T16:16:28.996326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-09-25T16:16:28.996329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-09-25T16:16:28.996394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:16:28.996404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:16:28.996408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-09-25T16:16:28.996412Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-09-25T16:16:28.996416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:16:28.996458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:16:28.996463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:16:28.996473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:16:28.996496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:16:28.996506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:16:28.996510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-09-25T16:16:28.996513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-09-25T16:16:28.996517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:16:28.996525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-09-25T16:16:28.997314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:16:28.997336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:16:28.997348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-09-25T16:16:28.997395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-09-25T16:16:28.997402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-09-25T16:16:28.997472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, 
txId: 104, at schemeshard: 72057594046678944 2025-09-25T16:16:28.997488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:16:28.997493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:394:2384] TestWaitNotification: OK eventTxId 104 2025-09-25T16:16:28.997566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:28.997589Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 32us result status StatusPathDoesNotExist 2025-09-25T16:16:28.997620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> Acceleration::TestThresholdGet4Plus2Block2Slow [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc1Slow >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[join-equi_join_three_simple--Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] >> TBtreeIndexTPartLarge::CutKeys [GOOD] >> TBtreeIndexTPartLarge::Group >> test.py::test[blocks-string_filter--ForceBlocks] >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] >> YdbProxy::DescribeTopic [GOOD] >> test.py::test[flatten_by-flatten_and_where--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> test.py::test[select-sampleselect--Results] [GOOD] >> test.py::test[select-simple_struct_field_access--ForceBlocks] >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> test.py::test[pg-tpcds-q28-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-09-25T16:16:28.979861Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061446071382763:2075];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:28.979880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000f73/r3tmp/tmpXuttOa/pdisk_1.dat 2025-09-25T16:16:29.033687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:29.035643Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:14513 TServer::EnableGrpc on GrpcPort 10348, node 1 2025-09-25T16:16:29.085129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:29.085163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:29.086893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:16:29.090519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:29.090530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:29.090532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:29.090581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:29.134785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
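
Every timestamped entry in this run has the same shape: an ISO-8601 timestamp, a node id, a component tag such as :FLAT_TX_SCHEMESHARD or :HIVE, a severity (DEBUG, INFO, NOTICE, WARN or ERROR), and the message with its source location. A minimal Python sketch for splitting a captured blob of this output back into entries; the regex, function names and the ya_test_output.txt file name are illustrative assumptions, not part of ya or the test harness:

import re
from collections import Counter

# Matches entries such as:
#   2025-09-25T16:16:29.134785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: ...
# Framework lines without a timestamp (FAKE_COORDINATOR, TClient::Ls, "waiting...") stay attached
# to the preceding entry's message, which is good enough for a sketch.
ENTRY = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>[A-Z_]+) "
    r"(?P<severity>DEBUG|INFO|NOTICE|WARN|ERROR): "
    r"(?P<message>.*?)"
    r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node |\Z)",
    re.S,
)

def entries(blob):
    # Yield (timestamp, node, component, severity, message) tuples from one run's output.
    for m in ENTRY.finditer(blob):
        yield m.group("ts", "node", "component", "severity", "message")

if __name__ == "__main__":
    with open("ya_test_output.txt", encoding="utf-8") as f:  # hypothetical capture of this log
        counts = Counter((c, s) for _, _, c, s, _ in entries(f.read()) if s in ("WARN", "ERROR"))
    for (component, severity), n in counts.most_common():
        print(f"{component:24} {severity:5} {n}")
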
2025-09-25T16:16:29.138763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:16:29.212877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:29.383684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:16:29.411357Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-09-25T16:16:29.413354Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061450366350816:2395] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:16:29.713362Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061450398992385:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:29.713423Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000f73/r3tmp/tmpr343Ae/pdisk_1.dat 2025-09-25T16:16:29.716058Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:29.727368Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:29.727561Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:29.727584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:29.730794Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10673 TServer::EnableGrpc on GrpcPort 12027, node 2 2025-09-25T16:16:29.751820Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:29.751831Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:29.751833Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:29.751869Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:29.788090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:16:29.789403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:16:29.801226Z node 2 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:16:29.833093Z node 2 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request >> test.py::test[in-huge_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> YdbProxy::DescribeConsumer [GOOD] >> YdbProxy::StaticCreds [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc1Slow [GOOD] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> Acceleration::TestDelayMultiplierPut4Plus2Block1Slow >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[join-pullup_extra_columns--ForceBlocks] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-09-25T16:16:29.761302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061448205490007:2181];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:29.761392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/endf/000f4d/r3tmp/tmpf0TFn4/pdisk_1.dat 2025-09-25T16:16:29.801916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:29.811386Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:64566 TServer::EnableGrpc on GrpcPort 1349, node 1 2025-09-25T16:16:29.864298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:29.864334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:29.865646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:16:29.876189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:29.876208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:29.876210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:29.876263Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:29.923690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:16:29.927015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:16:29.978472Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061448205490464:2294] txid# 281474976715658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-09-25T16:16:29.997663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000f4d/r3tmp/tmpYvHqPn/pdisk_1.dat 2025-09-25T16:16:30.613621Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:16:30.613646Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:30.624178Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:17950 TServer::EnableGrpc on GrpcPort 17276, node 2 2025-09-25T16:16:30.682061Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:30.682073Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:30.682076Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:30.682140Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17950 2025-09-25T16:16:30.710796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:30.710829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:30.711725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
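
The TX_PROXY error above, "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s", is a plain range check: the maximum of 2678400 s equals 31 days, while the requested 31536000 s equals 365 days. A small sketch of that arithmetic, with the limits copied from the error text; the helper name is illustrative and not the actual YDB validation code:

from datetime import timedelta

MIN_RETENTION = timedelta(seconds=1)          # lower bound from the error text
MAX_RETENTION = timedelta(seconds=2_678_400)  # upper bound from the error text: 31 days

def check_retention(seconds):
    # Reject values outside the advertised range, mirroring the message seen in the log.
    requested = timedelta(seconds=seconds)
    if not (MIN_RETENTION <= requested <= MAX_RETENTION):
        raise ValueError(
            f"Invalid retention period: specified: {seconds}s, "
            f"min: {int(MIN_RETENTION.total_seconds())}s, "
            f"max: {int(MAX_RETENTION.total_seconds())}s"
        )

print(timedelta(seconds=2_678_400))   # 31 days, 0:00:00
print(timedelta(seconds=31_536_000))  # 365 days, 0:00:00 (out of range, hence the error above)
check_retention(86_400)               # one day passes the check
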
2025-09-25T16:16:30.753312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:30.867742Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-09-25T16:16:29.811163Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061448364447444:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:29.811207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000f34/r3tmp/tmpiTXjYJ/pdisk_1.dat 2025-09-25T16:16:29.860770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:29.878325Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:20296 2025-09-25T16:16:29.912543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:29.912576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:29.913687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10949, node 1 2025-09-25T16:16:29.947330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:29.947344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:29.947346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:29.947391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:29.980315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:16:29.982623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:16:30.005630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-09-25T16:16:30.008447Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061452659415262:2323] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:16:30.096931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:30.651491Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061452172676634:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:30.653556Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000f34/r3tmp/tmpFIDlNr/pdisk_1.dat 2025-09-25T16:16:30.669673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:30.671653Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:19260 TServer::EnableGrpc on GrpcPort 11162, node 2 2025-09-25T16:16:30.708602Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-09-25T16:16:30.708614Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:30.708616Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:30.708664Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:30.753356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:30.754902Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:30.754919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:30.755872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
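
The HIVE lines above show each node's volatile state walking Unknown -> Disconnected -> Connecting -> Connected as the test cluster comes up, and the same progression repeats for every node in this section. A toy checker for that observed order; the enum and function are illustrative, not YDB code:

from enum import Enum

class VolatileState(Enum):
    UNKNOWN = "Unknown"
    DISCONNECTED = "Disconnected"
    CONNECTING = "Connecting"
    CONNECTED = "Connected"

# Bring-up order as observed in the HIVE log lines of this run.
ORDER = [VolatileState.UNKNOWN, VolatileState.DISCONNECTED,
         VolatileState.CONNECTING, VolatileState.CONNECTED]

def follows_bringup_order(transitions):
    # transitions: list of ("Unknown", "Disconnected")-style pairs parsed from the log.
    for src, dst in transitions:
        i = ORDER.index(VolatileState(src))
        if i + 1 >= len(ORDER) or ORDER[i + 1] is not VolatileState(dst):
            return False
    return True

# Pairs copied verbatim from the node 2 bring-up above.
print(follows_bringup_order([
    ("Unknown", "Disconnected"),
    ("Disconnected", "Connecting"),
    ("Connecting", "Connected"),
]))  # True
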
2025-09-25T16:16:30.761081Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:16:30.773339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1758816990798 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root 2025-09-25T16:16:30.894693Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1758816990798 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 >> test.py::test[aggregate-group_compact_sorted_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[optimizers-unordered_over_sort--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--ForceBlocks] [SKIPPED] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> test.py::test[join-group_compact_by--ForceBlocks] [GOOD] >> test.py::test[join-group_compact_by--Results] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] >> BlobStorageSync::TestSyncLogCuttingBlock4Plus2 [GOOD] >> BlobStorageSync::SyncWhenDiskGetsDown [GOOD] >> BurstDetection::TestPutEvenly >> test.py::test[pg-tpcds-q27-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[pg-tpcds-q27-default.txt-Results] >> test.py::test[aggregate-table_row_aggregation-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[weak_field-weak_field_long_fields--ForceBlocks] [GOOD] |80.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] >> test.py::test[join-count_bans--ForceBlocks] [GOOD] >> test.py::test[join-count_bans--Results] >> test.py::test[weak_field-weak_field_long_fields--Results] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_empty_subq-off-Results] [SKIPPED] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[join-lookupjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[lineage-some_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] >> test.py::test[column_order-select_win_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_win_func-default.txt-Results] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-externaltx-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-externaltx-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] >> BurstDetection::TestPutEvenly [GOOD] >> BurstDetection::TestPutBurst |80.6%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |80.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |80.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000 >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] >> test.py::test[result_types-pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[result_types-pg-default.txt-Results] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> Acceleration::TestDelayMultiplierPut4Plus2Block1Slow [GOOD] >> Acceleration::TestDelayMultiplierPutMirror3dc2Slow >> BurstDetection::TestPutBurst [GOOD] >> BurstDetection::TestOverlySensitive >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] >> test.py::test[blocks-sort_two_asc--ForceBlocks] >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_distinct--Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] >> test.py::test[blocks-string_filter--ForceBlocks] [GOOD] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[blocks-string_filter--Results] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[select-simple_struct_field_access--ForceBlocks] [GOOD] >> test.py::test[select-simple_struct_field_access--Results] |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard >> BurstDetection::TestOverlySensitive [GOOD] >> CompatibilityInfo::BSControllerCompatible [GOOD] >> CompatibilityInfo::BSControllerIncompatible |80.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> 
test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] >> test.py::test[join-equi_join_three_simple-off-ForceBlocks] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-extract_or_predicates-default.txt-ForceBlocks] >> Acceleration::TestDelayMultiplierPutMirror3dc2Slow [GOOD] >> Acceleration::TestDelayMultiplierPut4Plus2Block2Slow >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--Results] >> CompatibilityInfo::BSControllerIncompatible [GOOD] >> CompatibilityInfo::BSControllerIncompatibleWithDefault [GOOD] >> CompatibilityInfo::BSControllerSuppressCompatibilityCheck [GOOD] >> CompatibilityInfo::BSControllerMigration [GOOD] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[result_types-pg-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--ForceBlocks] >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/leadlag_compact--ForceBlocks] >> TSequenceReboots::CreateSequence >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt-Results] >> TSequenceReboots::CreateDropRecreate >> test.py::test[flatten_by-flatten_expr_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] >> TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard >> TSequenceReboots::CreateSequencesWithIndexedTable >> test.py::test[select-simple_struct_field_access--Results] [GOOD] >> test.py::test[pg-tpcds-q79-default.txt-Results] [GOOD] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q70-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q70-default.txt-Results] >> TSequenceReboots::CopyTableWithSequence >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] >> test.py::test[order_by-literal_single_item_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[join-pullup_extra_columns--ForceBlocks] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CompatibilityInfo::BSControllerMigration [GOOD] Test command err: RandomSeed# 15807617109493209813 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 
ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.110512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:2:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:00.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:01.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.210512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:01.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:4:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:01.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:01.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:01.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 
MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:01.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:3:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:02.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:5:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:02.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:6:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:02.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:6:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} 1970-01-01T00:01:03.310512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:6:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.410512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:7:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 524289} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.510512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:8:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:9:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:03.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 5 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 6 }}}} 1970-01-01T00:01:03.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} 1970-01-01T00:01:04.610512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:10:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 5 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 6 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.710512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:11:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 6 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 7 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog 
Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} 1970-01-01T00:01:04.810512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:12:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 6 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 7 }}}} 1970-01-01T00:01:04.910512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:13:4:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 403 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window ... 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.688488Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.690008Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.691436Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 
23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.692797Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.694226Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.695557Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 
Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.696910Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.698313Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } 2025-09-25T16:16:35.699716Z 1 00h00m00.000000s :BS_CONTROLLER ALERT: {BSCTXM00@migrate.cpp:253} CompatibilityInfo check failed ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 
Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 5 } } |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 |80.6%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> test.py::test[column_order-select_win_func-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_asyncfile--ForceBlocks] >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-opt_on_opt_side-off-ForceBlocks] [GOOD] >> test.py::test[join-grace_join2--ForceBlocks] >> test.py::test[join-opt_on_opt_side-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] >> Acceleration::TestDelayMultiplierPut4Plus2Block2Slow [GOOD] >> Acceleration::TestDelayMultiplierGetMirror3dc1Slow >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q42-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-Results] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] >> test.py::test[tpch-q5-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[blocks-sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_expr_struct-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_list--ForceBlocks] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 >> test.py::test[blocks-sort_two_asc--Results] >> Acceleration::TestDelayMultiplierGetMirror3dc1Slow [GOOD] >> Acceleration::TestDelayMultiplierGet4Plus2Block1Slow >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-pullup_null_column--ForceBlocks] >> test.py::test[pg-tpcds-q52-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Acceleration::TestDelayMultiplierGet4Plus2Block1Slow [GOOD] >> Acceleration::TestDelayMultiplierGetMirror3dc2Slow >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> test.py::test[pg-tpcds-q70-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] >> test.py::test[select-sum_to_string-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-sum_to_string-default.txt-Results] >> 
CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] >> test.py::test[join-inner_all--ForceBlocks] [GOOD] >> test.py::test[join-inner_all--Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[sampling-direct_read--ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read--Results] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] >> Acceleration::TestDelayMultiplierGetMirror3dc2Slow [GOOD] >> Acceleration::TestDelayMultiplierGet4Plus2Block2Slow |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-select_all-default.txt-Results] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] >> test.py::test[select-sum_to_string-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--Results] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] >> test.py::test[join-grace_join2--ForceBlocks] [GOOD] >> test.py::test[join-grace_join2--Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> test.py::test[binding-table_range_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> TPDiskRaces::KillOwnerWhileDecommitting >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] >> Acceleration::TestDelayMultiplierGet4Plus2Block2Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksGetMirror3dc1Slow >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] >> test.py::test[pg-tpcds-q55-default.txt-Results] >> test.py::test[window-full/aggregations_leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[dq-precompute_asyncfile--ForceBlocks] [GOOD] >> 
test.py::test[dq-precompute_asyncfile--Results] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_timeout-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] >> test.py::test[pg-tpcds-q80-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] >> test.py::test[join-pullup_rownumber-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_rownumber-off-Results] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] >> test.py::test[flatten_by-flatten_list--ForceBlocks] [GOOD] >> Acceleration::TestMaxNumOfSlowDisksGetMirror3dc1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksGet4Plus2Block1Slow >> test.py::test[flatten_by-flatten_list--Results] >> test.py::test[window-full/leadlag_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/leadlag_compact--Results] >> test.py::test[join-pullup_null_column--ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize1000 >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[produce-process_streaming_inline_bash-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] >> test.py::test[expr-non_persistable_order_by_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_order_by_fail--Results] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] >> test.py::test[aggregate-GroupByOneField--Results] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] >> Acceleration::TestMaxNumOfSlowDisksGet4Plus2Block1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc1Slow >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize1000 |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> test.py::test[flatten_by-flatten_list--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q80-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> 
test.py::test[flatten_by-flatten_two_fields--ForceBlocks] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test.py::test[pg-tpcds-q63-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block1Slow >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] >> test.py::test[order_by-order_by_missing_project_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[produce-process_streaming_inline_bash-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_assume--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_assume--Results] >> test.py::test[produce-reduce_with_assume--Results] [SKIPPED] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] >> test.py::test[join-inner_on_key_only-off-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[join-join_and_distinct_key--ForceBlocks] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[column_group-hint_empty_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[window-full/session_aliases--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint--ForceBlocks] [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1BlobSize1000 >> test.py::test[sampling-direct_read-dynamic-ForceBlocks] [GOOD] >> test.py::test[sampling-direct_read-dynamic-Results] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint--Results] >> TBsLocalRecovery::WriteRestartRead [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TBsLocalRecovery::MultiPutWriteRestartRead >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED] >> 
test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block1Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc2Slow >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test.py::test[binding-table_range_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint--Results] [GOOD] >> test.py::test[view-file_outer--ForceBlocks] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TSequenceReboots::CreateSequence [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Results] >> test.py::test[blocks-add_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[sampling-direct_read-dynamic-Results] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] >> test.py::test[hor_join-out_max_outtables-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_max_outtables-default.txt-Results] >> Acceleration::TestMaxNumOfSlowDisksPutMirror3dc2Slow [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block2Slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:142:2058] recipient: [1:117:2146] 2025-09-25T16:16:36.557929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:36.557965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.557971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:36.557976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:36.557983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:36.557987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:36.557997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.558010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:36.558166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:36.558230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:36.608594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:36.608635Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:36.608751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:198:2058] recipient: [1:15:2062] 2025-09-25T16:16:36.618881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:36.625099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:36.625157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:36.626577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:36.626634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:36.626743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.626806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:36.627267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:36.627306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:36.627589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:36.627601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-09-25T16:16:36.627639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:36.627648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:36.627655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:36.627678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] Leader for TabletID 72057594037968897 is [1:227:2225] sender: [1:228:2058] recipient: [1:221:2221] 2025-09-25T16:16:36.629345Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:248:2058] recipient: [1:15:2062] 2025-09-25T16:16:36.670293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:36.670408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.670495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:36.670506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:36.670583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:36.670602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:36.673392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.673466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:36.673543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.673556Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:36.673561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:36.673566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:36.677436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.677473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:36.677484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:36.678066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.678079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.678086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:36.678102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:36.678949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:36.679490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:36.679536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:263:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:36.679769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.679800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 Tab ... 
TTxPublishToSchemeBoard Send, to populator: [52:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-09-25T16:16:51.609888Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [52:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-09-25T16:16:51.609953Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-09-25T16:16:51.609967Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-09-25T16:16:51.609982Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:16:51.609988Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-09-25T16:16:51.609992Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-09-25T16:16:51.609998Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-09-25T16:16:51.610003Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-09-25T16:16:51.610008Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-09-25T16:16:51.610014Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-09-25T16:16:51.610020Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1002:0 2025-09-25T16:16:51.610025Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1002:0 2025-09-25T16:16:51.610054Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:16:51.610061Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2025-09-25T16:16:51.610066Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-09-25T16:16:51.610070Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:16:51.610263Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [52:213:2214], Recipient [52:130:2155]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-09-25T16:16:51.610275Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:16:51.610291Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 
72057594046678944, cookie: 1002 2025-09-25T16:16:51.610306Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:16:51.610312Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-09-25T16:16:51.610318Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-09-25T16:16:51.610324Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-09-25T16:16:51.610345Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:16:51.610713Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [52:213:2214], Recipient [52:130:2155]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2025-09-25T16:16:51.610726Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:16:51.610740Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:16:51.610755Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:16:51.610761Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-09-25T16:16:51.610766Z node 52 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:16:51.610771Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:16:51.610788Z node 52 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2025-09-25T16:16:51.610794Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [52:364:2343] 2025-09-25T16:16:51.610800Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:16:51.611276Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 
72057594046678944 2025-09-25T16:16:51.611479Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-09-25T16:16:51.611486Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:16:51.611720Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-09-25T16:16:51.611725Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:16:51.611738Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [52:364:2343] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1002 at schemeshard: 72057594046678944 2025-09-25T16:16:51.611750Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-09-25T16:16:51.611754Z node 52 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [52:365:2344] 2025-09-25T16:16:51.611788Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [52:367:2346], Recipient [52:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:16:51.611794Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:16:51.611797Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2025-09-25T16:16:51.611891Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [52:390:2367], Recipient [52:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:16:51.611895Z node 52 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:16:51.611906Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:16:51.611941Z node 52 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq" took 32us result status StatusSuccess 2025-09-25T16:16:51.612010Z node 52 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq" PathDescription { Self { Name: "seq" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> test.py::test[pg-tpcds-q84-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q83-default.txt-Results] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] >> test.py::test[view-file_outer--ForceBlocks] [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[order_by-sort_with_take--ForceBlocks] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--ForceBlocks] >> VDiskBalancing::TestRandom_Block42 [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_having--Results] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_members--Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 6989351824137798051 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { 
} ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-09-25T16:16:17.497740Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-09-25T16:16:17.608607Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 
0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-09-25T16:16:18.195591Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:304:64] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-09-25T16:16:18.195661Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-09-25T16:16:18.195687Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7684:16] ServerId# [1:7693:1099] TabletId# 72057594037932033 PipeClientId# [5:7684:16] 2025-09-25T16:16:18.195713Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-09-25T16:16:18.195767Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-09-25T16:16:18.195787Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:222:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] 
TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Status ... # 0.999646} Step = 936 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 949 SEND TEvPut with 
key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Stop node 3 2025-09-25T16:16:45.058577Z 1 00h25m30.752048s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 4 2025-09-25T16:16:45.133636Z 1 00h25m40.756779s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Stop node 7 2025-09-25T16:16:45.385677Z 1 00h26m10.783872s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Stop node 1 2025-09-25T16:16:45.480638Z 1 00h26m20.784384s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 1 2025-09-25T16:16:45.574962Z 1 00h26m40.789960s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Starting nodes Start compaction 1 Start checking |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> test.py::test[join-pullup_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB >> test.py::test[join-star_join_inners-off-ForceBlocks] |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> test.py::test[hor_join-out_max_outtables-default.txt-Results] [GOOD] >> test.py::test[in-in_noansi_join--ForceBlocks] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> 
TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> test.py::test[flatten_by-flatten_two_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_two_fields--Results] >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> YdbTableSplit::SplitByLoadWithReads >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] >> test.py::test[sampling-bind_join_left-default.txt-ForceBlocks] [GOOD] >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block2Slow [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize1000 >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly >> test.py::test[view-file_outer_library--ForceBlocks] [GOOD] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] >> YdbTableSplit::RenameTablesAndSplit >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> test.py::test[schema-select_operate_with_columns_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> Acceleration::TestMaxNumOfSlowDisksPut4Plus2Block2Slow [GOOD] Test command err: RandomSeed# 12651976078987817245 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 
32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 35 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 36 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap 
{EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 
ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLo ... 
{MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 
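The EvVPutResult records above follow a fixed textual layout (Status, blob ID, MsgQoS with Cost and the flow-control Window bounds), so they can be tallied offline when triaging a run of this test. A minimal sketch, assuming the stderr of the unittest has been saved to a plain-text file; the file name and the choice of fields to aggregate are illustrative only and not part of the test output:

import re
from collections import Counter

# Matches the EvVPutResult records printed by the unittest above, e.g.
#   {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS ... Cost# 644 ...}}
RECORD_RE = re.compile(
    r"\{EvVPutResult Status# (?P<status>\w+) ID# \[(?P<blob_id>[^\]]+)\].*?Cost# (?P<cost>\d+)"
)

def summarize(log_path: str) -> None:
    statuses = Counter()
    total_cost = 0
    with open(log_path, encoding="utf-8") as fh:
        for line in fh:
            for m in RECORD_RE.finditer(line):
                statuses[m.group("status")] += 1
                total_cost += int(m.group("cost"))
    print("statuses:", dict(statuses))
    print("total reported cost:", total_cost)

if __name__ == "__main__":
    # Hypothetical path: point it at a saved copy of the test's stderr dump.
    summarize("acceleration_test_stderr.log")

For the excerpt shown here every record reports Status# OK, so a non-empty count for any other status is a quick signal that the acceleration scenario degraded rather than merely slowed down.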
1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:47.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 77 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 78 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 32 } Cost# 1376 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 33 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 
MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 76 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 77 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 74 } Cost# 644 ExtQueueId# PutTabletLog 
IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 75 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 75 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 76 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 73 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 74 }}}} 1970-01-01T00:02:46.060512Z Unwrap {EvVPutResult Status# OK ID# [1:1:1:1:1:1024:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 78 } Cost# 644 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 79 }}}} >> test.py::test[blocks-combine_all_sum_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] >> test.py::test[join-join_and_distinct_key--ForceBlocks] [GOOD] >> test.py::test[join-join_and_distinct_key--Results] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction >> test.py::test[select-uncorrelated_subqueries--ForceBlocks] [GOOD] >> test.py::test[select-uncorrelated_subqueries--Results] >> test.py::test[pg-tpcds-q74-default.txt-ForceBlocks] [GOOD] >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] >> test.py::test[pg-tpcds-q74-default.txt-Results] >> test.py::test[sampling-bind_join_left-default.txt-Results] [GOOD] >> test.py::test[sampling-read-dynamic-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] >> YdbTableSplit::SplitByLoadWithDeletes >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize1000 >> test.py::test[flatten_by-flatten_two_fields--Results] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] >> test.py::test[schema-select_operate_with_columns_simple-default.txt-Results] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> 
test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] >> test.py::test[pg-tpcds-q83-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q86-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter_opt--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] >> test.py::test[window-full/session_aliases--ForceBlocks] [GOOD] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[order_by-sort_with_take--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take--Results] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> test.py::test[select-uncorrelated_subqueries--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 >> test.py::test[bigdate-table_arithmetic_mul_div-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> test.py::test[blocks-combine_all_decimal_max-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] >> test.py::test[pg-tpcds-q74-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] >> test.py::test[join-join_comp_common_table--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> test.py::test[pg-aggregate_combine_all--ForceBlocks] >> test.py::test[join-join_and_distinct_key--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] >> test.py::test[join-star_join_inners-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners-off-Results] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [SKIPPED] >> test.py::test[expr-constraints_of--ForceBlocks] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[result_types-data-default.txt-Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] >> 
test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] >> test.py::test[sampling-read-dynamic-ForceBlocks] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_tuple-default.txt-Results] >> test.py::test[sampling-read-dynamic-Results] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[window-full/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[join-extract_or_predicates-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-extract_or_predicates-default.txt-Results] >> YdbTableSplit::SplitByLoadWithUpdates >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] >> TBtreeIndexTPartLarge::Group [GOOD] >> TBtreeIndexTPartLarge::History >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] >> test.py::test[blocks-distinct_pure_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q92-default.txt-Results] >> test.py::test[sampling-read-dynamic-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] >> test.py::test[in-in_with_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-part_sortness--ForceBlocks] >> test.py::test[pg-tpcds-q86-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q89-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateMultipleSequencesHaveInitialSequenceShard [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:141:2162] 
sender: [1:142:2058] recipient: [1:117:2146] 2025-09-25T16:16:36.964437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:36.964473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.964479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:36.964485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:36.964492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:36.964496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:36.964506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.964522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:36.964649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:36.964719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:36.990963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:36.991002Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:36.991143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:198:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.001512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:37.001650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:37.001684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:37.002586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:37.002637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:37.002743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.002811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:37.003217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.003260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:37.003533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:37.003543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.003582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:37.003591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:37.003598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:37.003617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] Leader for TabletID 72057594037968897 is [1:227:2225] sender: [1:228:2058] recipient: [1:221:2221] 2025-09-25T16:16:37.005046Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:248:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.025068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:37.025156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.025229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:37.025240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:37.025300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:37.025317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:37.027333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.027401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:37.027480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.027494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:37.027501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:37.027508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:37.028069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.028086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:37.028091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:37.028519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.028532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.028538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:37.028557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:37.029261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:37.029990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:37.030056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:263:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000001 2025-09-25T16:16:37.030305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.030343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 Tab ... :03.885240Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-09-25T16:17:03.885258Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-09-25T16:17:03.885263Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [83:365:2344] 2025-09-25T16:17:03.885270Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:17:03.885818Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:03.886265Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:17:03.886276Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:03.886399Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:17:03.886407Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:03.886425Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [83:365:2344] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-09-25T16:17:03.886442Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:17:03.886448Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:440:2417] 2025-09-25T16:17:03.886485Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [83:444:2421], Recipient [83:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:03.886492Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:03.886515Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1003 2025-09-25T16:17:03.886605Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender 
[83:493:2470], Recipient [83:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:03.886609Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:03.886623Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:03.886664Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq1" took 35us result status StatusSuccess 2025-09-25T16:17:03.886740Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq1" PathDescription { Self { Name: "seq1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1005 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:17:03.886818Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [83:494:2471], Recipient [83:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:03.886821Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:03.886827Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:03.886840Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq2" took 12us result status StatusSuccess 2025-09-25T16:17:03.886884Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq2" PathDescription { Self { Name: "seq2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:17:03.886968Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [83:495:2472], Recipient [83:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:03.886974Z node 83 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:03.886983Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:03.887002Z node 83 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq3" took 18us result status StatusSuccess 2025-09-25T16:17:03.887040Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/seq3" PathDescription { Self { Name: "seq3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq3" PathId { OwnerId: 72057594046678944 LocalId: 6 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[blocks-combine_all_min_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] >> TPDiskUtil::DriveEstimator [GOOD] >> TPDiskUtil::OffsetParsingCorrectness [GOOD] >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::FormatSectorMap >> TPDiskUtil::FormatSectorMap [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight1BlobSize1000 |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-selfjoin_on_sorted_with_filter-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: 
[1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:16:35.578977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:35.579003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:35.579010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:35.579015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:35.579022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:35.579027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:35.579037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:35.579050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:35.579172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:35.579233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:35.602588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:35.602631Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:35.602746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:16:35.619214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:35.619352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:35.619400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:35.621136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:35.621218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:35.621333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:35.621664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:35.622812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:35.622862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:35.623144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:35.623155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:35.623177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:35.623187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:35.623193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:35.623238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:16:35.624787Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:35.646603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:35.646701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:35.646773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:35.646782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:35.646835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:35.646851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:35.647742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:35.647808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:35.647868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:35.647907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:35.647914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:35.647920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:35.648459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:35.648472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:35.648478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:35.648882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:35.648893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:35.648900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:35.648909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:35.649584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:35.649986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:35.650048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:35.650315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:35.650343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... lPathId: 4], version: 2 2025-09-25T16:17:04.554206Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:17:04.554218Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2025-09-25T16:17:04.554224Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [97:384:2362] 2025-09-25T16:17:04.554229Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:17:04.558732Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:04.560160Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:17:04.560189Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:04.560428Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:17:04.560436Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:04.560462Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [97:384:2362] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-09-25T16:17:04.560482Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:17:04.560490Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:385:2363] 2025-09-25T16:17:04.560536Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [97:389:2367], Recipient [97:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:04.560544Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:04.560549Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 
2025-09-25T16:17:04.560640Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [97:455:2432], Recipient [97:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:04.560647Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:04.560668Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:04.560732Z node 97 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq1" took 56us result status StatusSuccess 2025-09-25T16:17:04.560863Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq1" PathDescription { Self { Name: "seq1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:17:04.560992Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [97:456:2433], Recipient [97:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:04.560998Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 
2025-09-25T16:17:04.561009Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:04.561030Z node 97 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq2" took 21us result status StatusSuccess 2025-09-25T16:17:04.561081Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq2" PathDescription { Self { Name: "seq2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:17:04.561205Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [97:457:2434], Recipient [97:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:04.561210Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:04.561219Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:04.561237Z node 97 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq3" took 17us result status StatusSuccess 
2025-09-25T16:17:04.561287Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/seq3" PathDescription { Self { Name: "seq3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1002 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq3" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[pg-aggregate_combine_all--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-mixed/aggregations--ForceBlocks] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[expr-constraints_of--ForceBlocks] [GOOD] >> test.py::test[expr-constraints_of--Results] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::FormatSectorMap [GOOD] Test command err: 2025-09-25T16:14:45.767422Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.784936Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 10092202409607507619 MagicNextLogChunkReference: 10026380969189092574 MagicLogChunk: 5099651201011218958 MagicDataChunk: 14874164721530214283 MagicSysLogChunk: 10966328193611727981 MagicFormatChunk: 17332287817462050952 
ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885634247 (2025-09-25T16:14:45.634247Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.784954Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:590} PDiskId# 1 Can't start due to a guid error expected# 10092202409607507618 on-disk# 10092202409607507619 PDiskId# 1 2025-09-25T16:14:45.947806Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:45.961637Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 2450498860505178253 MagicNextLogChunkReference: 1193592262837470905 MagicLogChunk: 10492086431275246913 MagicDataChunk: 13699267113468979211 MagicSysLogChunk: 10054325146514874734 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885856500 (2025-09-25T16:14:45.856500Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:45.972925Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:45.983353Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:45.983382Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:45.985152Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:45.994679Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2065276 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.001101Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] GroupSizeInUnits: 0 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 2065276 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-09-25T16:14:46.022982Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.033101Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 
DiskSize: 134217728000 bytes (134 GB) Guid: 2450498860505178253 MagicNextLogChunkReference: 1193592262837470905 MagicLogChunk: 10492086431275246913 MagicDataChunk: 13699267113468979211 MagicSysLogChunk: 10054325146514874734 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816885856500 (2025-09-25T16:14:45.856500Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.044927Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1261886 NonceLog# 2065276 NonceData# 1811744} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:46.053128Z node 2 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:46.053151Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-09-25T16:14:46.053171Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.061001Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.064963Z node 2 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:46.336882Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.360983Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9111005955475480396 MagicNextLogChunkReference: 8140178489105455849 MagicLogChunk: 10833264090966462640 MagicDataChunk: 10659799907030977367 MagicSysLogChunk: 17097855175736200589 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816886244410 (2025-09-25T16:14:46.244410Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.377008Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 
LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:46.390754Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:46.390796Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.391328Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.393013Z node 3 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:1237} Actor recieved device error Details# test PDiskId# 1 ... waiting for Block device stop ... waiting for Block device stop (done) 2025-09-25T16:14:46.577076Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.588935Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-09-25T16:14:46.732298Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:46.760997Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5749029319545132233 MagicNextLogChunkReference: 9517544273182699802 MagicLogChunk: 13118941221028959611 MagicDataChunk: 8444693274338430577 MagicSysLogChunk: 7048189667859306178 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816886650680 (2025-09-25T16:14:46.650680Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:46.776926Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:46.788915Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:46.788963Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:46.802022Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} 
PDisk have successfully started PDiskId# 1 2025-09-25T16:14:46.806494Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1501941 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:46.817041Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2298} removed owner from chunks Keeper OwnerId# 3 PDiskId# 1 2025-09-25T16:14:46.817078Z node 5 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2346} KillOwner ownerId# 3 ownerRound# 2 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-09-25T16:14:46.825123Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 4 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1501941 CutLogId# [0:0:0] ownerRound# 3 PDis ... PDiskId# 1 ReqId# 2560053038 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262174Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560053806 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262190Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560054574 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262209Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560055342 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262221Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560056110 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262235Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560056878 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262246Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560057646 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262262Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560058414 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262279Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560059182 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262293Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560059950 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262307Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560060718 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262322Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560061486 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262334Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560062254 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262344Z node 
14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560063022 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262358Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560063790 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262384Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560064558 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262396Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560065326 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262410Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560066094 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262421Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560066862 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262438Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560067630 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262451Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560068398 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262478Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560069166 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262492Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560069934 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262506Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560070702 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262521Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560071470 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.262531Z node 14 :BS_PDISK WARN: blobstorage_pdisk_completion_impl.cpp:397: Reply Error from TCompletionChunkRead PDiskId# 1 ReqId# 2560072238 reason# One of ChunkReadPart failed due to unknown reason 2025-09-25T16:14:56.269185Z node 14 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:56.269198Z node 14 :BS_PDISK WARN: {BPD92@blobstorage_pdisk_impl.cpp:2968} PDisk's PlainDataChunks parameters mismatch, flag from Format will be used Format.IsPlainDataChunks()# false Cfg->PlainDataChunks# true PDiskId# 1 2025-09-25T16:14:56.290625Z node 14 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12191640969653896104 MagicNextLogChunkReference: 10318067617550557645 MagicLogChunk: 2388142961199887883 MagicDataChunk: 6500323498848961817 
MagicSysLogChunk: 15426515105098497261 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816895694127 (2025-09-25T16:14:55.694127Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData | FormatFlagPlainDataChunks}} PDiskId# 1 2025-09-25T16:14:56.300901Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 9624040 NonceLog# 8562891 NonceData# 9949925} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [5:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:56.308873Z node 14 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:56.308983Z node 14 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 27 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 110592} PDiskId# 1 2025-09-25T16:14:56.308998Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 110592} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:56.310753Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 all chunk reads are received all chunk writes are received all log writes are received reformat 2025-09-25T16:14:56.324436Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [5:_:0:0:0] GroupSizeInUnits: 0 ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1677480 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 306}} Owned chunkIds: {2}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {3..982} PDiskId# 1 2025-09-25T16:14:56.333697Z node 14 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1174} HandlePoison, PDiskThread stopped PDiskId# 1 2025-09-25T16:14:56.455434Z node 14 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:56.455454Z node 14 :BS_PDISK WARN: {BPD92@blobstorage_pdisk_impl.cpp:2968} PDisk's PlainDataChunks parameters mismatch, flag from Format will be used Format.IsPlainDataChunks()# false Cfg->PlainDataChunks# true PDiskId# 1 2025-09-25T16:14:56.476963Z node 14 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12191640969653896104 MagicNextLogChunkReference: 4262911805003581902 MagicLogChunk: 7604711632250672154 MagicDataChunk: 3052660972150071746 MagicSysLogChunk: 433522477621760185 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) 
SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816896351065 (2025-09-25T16:14:56.351065Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData | FormatFlagPlainDataChunks}} PDiskId# 1 2025-09-25T16:14:56.488962Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:56.492874Z node 14 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:56.492897Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:56.497461Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:56.504950Z node 14 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [6:_:0:0:0] FirstNonceToKeep# 1225127 CutLogId# [14:7554061046132186275:2050] ownerRound# 26 PDiskId# 1 2025-09-25T16:14:56.512910Z node 14 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 >> test.py::test[pg-tpcds-q92-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] >> test.py::test[select-const_subrequest_and_select_by_all-default.txt-Results] [GOOD] >> test.py::test[select-deep_udf_call--ForceBlocks] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight1BlobSize1000 [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight10BlobSize1000 >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-date_add_interval--ForceBlocks] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-div_uint64--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_nested_right--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_right--Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] >> 
test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] >> TSequenceReboots::CreateDropRecreate [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10000Inflight100BlobSize1000 >> AssignTxId::Basic |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_expr_bounds--ForceBlocks] >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--ForceBlocks] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> test.py::test[insert-part_sortness--ForceBlocks] [GOOD] >> test.py::test[insert-part_sortness--Results] >> test.py::test[simple_columns-simple_columns_join_all-default.txt-Results] [GOOD] >> test.py::test[dq-wrong_script_segf--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateDropRecreate [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 
72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:16:36.723986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:36.724012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.724019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:36.724024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:36.724031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:36.724036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:36.724046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:36.724062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:36.724188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:36.724250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:36.748340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:36.748394Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:36.748513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:16:36.753537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:36.753669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:36.753718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:36.755273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:36.755358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:36.755480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.755719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:36.756704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:36.756755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:36.757054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:36.757066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:36.757089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:36.757098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:36.757106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:36.757153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:16:36.758673Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:36.782730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:36.782810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.782868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:36.782877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:36.782950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:36.782968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:36.783704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.783769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:36.783817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.783828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:36.783834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:36.783840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:36.784281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.784294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:36.784300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:36.784653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.784665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:36.784672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:36.784679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:36.785397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:36.785783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:36.785839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000001 2025-09-25T16:16:36.786089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:36.786117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... blishToSchemeBoard Send, to populator: [97:213:2214], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-09-25T16:17:08.771663Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [97:213:2214], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-09-25T16:17:08.771707Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-09-25T16:17:08.771717Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-09-25T16:17:08.771731Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:17:08.771736Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1004:0 progress is 1/1 2025-09-25T16:17:08.771741Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-09-25T16:17:08.771747Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1004:0 progress is 1/1 2025-09-25T16:17:08.771751Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-09-25T16:17:08.771756Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-09-25T16:17:08.771762Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-09-25T16:17:08.771767Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1004:0 2025-09-25T16:17:08.771772Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1004:0 2025-09-25T16:17:08.771800Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:17:08.771808Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1004, publications: 2, subscribers: 1 2025-09-25T16:17:08.771812Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-09-25T16:17:08.771817Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2025-09-25T16:17:08.771978Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [97:213:2214], Recipient [97:140:2161]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 11 } 2025-09-25T16:17:08.771985Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:17:08.772000Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.772011Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.772016Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-09-25T16:17:08.772022Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-09-25T16:17:08.772027Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-09-25T16:17:08.772043Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:17:08.772171Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [97:213:2214], Recipient [97:140:2161]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 2 } 2025-09-25T16:17:08.772179Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:17:08.772188Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.772200Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.772205Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-09-25T16:17:08.772210Z node 97 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-09-25T16:17:08.772214Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:17:08.772224Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2025-09-25T16:17:08.772231Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [97:368:2347] 2025-09-25T16:17:08.772236Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:17:08.772770Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:08.773060Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.773069Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:08.773112Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-09-25T16:17:08.773116Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:17:08.773132Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [97:368:2347] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1004 at schemeshard: 72057594046678944 2025-09-25T16:17:08.773147Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-09-25T16:17:08.773154Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [97:447:2424] 2025-09-25T16:17:08.773184Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [97:449:2426], Recipient [97:140:2161]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:08.773190Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:17:08.773195Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2025-09-25T16:17:08.773290Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [97:471:2447], Recipient [97:140:2161]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:17:08.773295Z node 97 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:17:08.773309Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/seq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:08.773349Z node 97 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/seq" took 35us result status StatusSuccess 2025-09-25T16:17:08.773462Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/seq" PathDescription { Self { Name: "seq" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "seq" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[tpch-q10-default.txt-ForceBlocks] >> AssignTxId::Basic [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> test.py::test[sampling-take_with_sampling-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-09-25T16:17:09.549552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061622258959673:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:09.549596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005541/r3tmp/tmpAYZn13/pdisk_1.dat 2025-09-25T16:17:09.621821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:09.624909Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to 
server localhost:9415 2025-09-25T16:17:09.651446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:09.651481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:09.652631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14692, node 1 2025-09-25T16:17:09.675345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:09.675358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:09.675361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:09.675410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:09.726568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:09.729695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:09.882350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:10.013929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061626553927599:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:10.013964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:10.014236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061626553927609:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:10.014270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:10.070380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:17:10.078935Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2025-09-25T16:17:10.078962Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2025-09-25T16:17:10.081834Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2025-09-25T16:17:10.081868Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:241: [controller 72075186224037888][TxInit] Execute 2025-09-25T16:17:10.081921Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:246: [controller 72075186224037888][TxInit] Complete 2025-09-25T16:17:10.081923Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:114: [controller 72075186224037888] SwitchToWork 2025-09-25T16:17:10.083385Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:143: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:14692" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2025-09-25T16:17:10.083446Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:14692" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2025-09-25T16:17:10.083470Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-09-25T16:17:10.084232Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2025-09-25T16:17:10.085096Z node 1 :REPLICATION_CONTROLLER TRACE: tenant_resolver.cpp:33: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 
2025-09-25T16:17:10.085194Z node 1 :REPLICATION_CONTROLLER TRACE: tenant_resolver.cpp:33: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:17:10.085508Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:258: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-09-25T16:17:10.085512Z node 1 :REPLICATION_CONTROLLER NOTICE: controller.cpp:273: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-09-25T16:17:10.085515Z node 1 :REPLICATION_CONTROLLER INFO: controller.cpp:277: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-09-25T16:17:10.088154Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:302: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-09-25T16:17:10.088169Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:326: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication TClient::Ls response: 2025-09-25T16:17:10.094713Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-09-25T16:17:10.094730Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2025-09-25T16:17:10.094768Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:173: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-09-25T16:17:10.094789Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-09-25T16:17:10.094796Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817030131 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns... (TRUNCATED) 2025-09-25T16:17:10.095938Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-09-25T16:17:10.095972Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-09-25T16:17:10.096004Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2025-09-25T16:17:10.096015Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-09-25T16:17:10.096038Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-09-25T16:17:10.096048Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-09-25T16:17:10.097040Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-09-25T16:17:10.097315Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-09-25T16:17:10.097330Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-09-25T16:17:10.097343Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-09-25T16:17:10.097580Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-09-25T16:17:10.097587Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-09-25T16:17:10.097594Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-09-25T16:17:10.097773Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-09-25T16:17:10.097780Z node 1 :REPLICATION_CONTROLLER 
DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-09-25T16:17:10.098039Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-09-25T16:17:10.098574Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-09-25T16:17:10.098588Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-09-25T16:17:10.098598Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-09-25T16:17:10.098799Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-09-25T16:17:10.098805Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-09-25T16:17:10.099417Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2025-09-25T16:17:10.099448Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-09-25T16:17:10.099453Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-09-25T16:17:10.099459Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-09-25T16:17:10.099672Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:762: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-09-25T16:17:10.099680Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-09-25T16:17:10.099854Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Results] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> test.py::test[pg-tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> 
test.py::test[join-left_join_right_pushdown_nested_right--Results] [GOOD]
>> test.py::test[join-left_only_semi_and_other-off-ForceBlocks]
>> test.py::test[select-deep_udf_call--ForceBlocks] [GOOD]
>> test.py::test[select-deep_udf_call--Results]
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
>> test.py::test[blocks-div_uint64--ForceBlocks] [GOOD]
>> test.py::test[blocks-div_uint64--Results]
>> test.py::test[insert-part_sortness--Results] [GOOD]
>> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks]
>> TAuditTest::OtherGetRequestsAreAudited [GOOD]
>> test.py::test[bigdate-table_arithmetic_narrow-default.txt-ForceBlocks] [GOOD]
>> TAuditTest::BlacklistedPathsAreNotAudited [GOOD]
>> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results]
>> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD]
>> test.py::test[schema-def_values--ForceBlocks]
>> test.py::test[pg-select_from_columns-default.txt-ForceBlocks] [GOOD]
>> test.py::test[pg-select_from_columns-default.txt-Results]
>> TAuditTest::AuditDisabledWithoutAppData [GOOD]
|80.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log}
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TAuditTest::OtherGetRequestsAreAudited [GOOD]
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TAuditTest::BlacklistedPathsAreNotAudited [GOOD]
>> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD]
>> test.py::test[insert-append--ForceBlocks]
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/audit/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD]
>> YdbTableSplit::MergeByNoLoadAfterSplit
>> test.py::test[expr-empty_iterator--ForceBlocks] [GOOD]
>> Initializer::Simple
>> test.py::test[expr-empty_iterator--Results]
|80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
>> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData
|80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
|80.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log}
|80.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
>> test.py::test[select-deep_udf_call--Results] [GOOD]
>> test.py::test[join-yql-14829_leftonly--Results] [GOOD]
>> test.py::test[join-yql-8125--ForceBlocks]
>> test.py::test[blocks-date_add_interval--ForceBlocks] [GOOD]
>> test.py::test[blocks-date_add_interval--Results]
>> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD]
>> test.py::test[pg-select_limit-default.txt-ForceBlocks]
>> test.py::test[blocks-div_uint64--Results] [GOOD]
>> test.py::test[blocks-mod_uint64_opt2--ForceBlocks]
|80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut
|80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut
|80.8%| [TA] $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|80.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
|80.8%| [TA] {RESULT} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> test.py::test[join-join_comp_common_table--Results] [GOOD]
>> test.py::test[join-join_without_column-off-ForceBlocks]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
>> test.py::test[sampling-direct_read--Results] [GOOD]
>> test.py::test[expr-empty_iterator--Results] [GOOD]
>> test.py::test[sampling-join_right_sample-default.txt-Results]
>> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD]
>> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD]
>> test.py::test[window-mixed/aggregations--ForceBlocks] [GOOD]
>> test.py::test[window-mixed/aggregations--Results]
>> test.py::test[pg_catalog-lambda--Results] [GOOD]
>> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED]
>> test.py::test[produce-fuse_reduces_with_presort--Results]
>> TSyncNeighborsTests::SerDes3 [GOOD]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest
>> test.py::test[window-win_expr_bounds--ForceBlocks] [GOOD]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD]
>> test.py::test[window-win_expr_bounds--Results]
|80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[select-deep_udf_call--Results] [GOOD]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD]
>> TSyncBrokerTests::ShouldReturnToken
>> TSyncBrokerTests::ShouldReturnToken [GOOD]
>> TSyncBrokerTests::ShouldReleaseToken [GOOD]
>> test.py::test[join-extract_or_predicates-default.txt-Results] [GOOD]
>> test.py::test[join-full_equal_null--ForceBlocks]
>> test.py::test[in-in_noansi_join--ForceBlocks] [GOOD]
>> test.py::test[in-in_noansi_join--Results]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD]
Test command err:
2025-09-25T16:17:17.878558Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0
2025-09-25T16:17:17.975774Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0
2025-09-25T16:17:17.975814Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:123: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, waiting: 0
>> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD]
>> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD]
>> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD]
>> test.py::test[schema-def_values--ForceBlocks] [GOOD]
>> test.py::test[schema-def_values--Results]
>> TSyncNeighborsTests::SerDes1 [GOOD]
|80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[expr-empty_iterator--Results] [GOOD]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD]
>> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD]
>> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD]
>> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD]
Test command err:
2025-09-25T16:17:19.071989Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0
2025-09-25T16:17:19.072035Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:50: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:6:2053], token sent, active: 1, waiting: 0
>> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results]
>> test.py::test[insert-replace_ordered_by_key-default.txt-ForceBlocks] [GOOD]
>> test.py::test[insert-replace_ordered_by_key-default.txt-Results]
>> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD]
>> test.py::test[aggr_factory-bottom_by-default.txt-Results]
>> test.py::test[blocks-date_add_interval--Results] [GOOD]
>> test.py::test[blocks-date_group_by--ForceBlocks]
>> test.py::test[join-left_only_semi_and_other-off-ForceBlocks] [GOOD]
>> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED]
>> test.py::test[join-left_semi_with_other-off-ForceBlocks]
>> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD]
>> TSyncBrokerTests::ShouldEnqueue [GOOD]
>> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD]
|80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD]
>> test.py::test[insert-append--ForceBlocks] [GOOD]
>> test.py::test[insert-append--Results]
>> test.py::test[window-win_expr_bounds--Results] [GOOD]
>> test.py::test[window-win_func_into_udf--ForceBlocks]
>> TSyncNeighborsTests::SerDes2 [GOOD]
>> TDatabaseResolverTests::Ydb_Dedicated [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD]
Test command err:
2025-09-25T16:17:20.186532Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, 
active: 1, waiting: 0 2025-09-25T16:17:20.186569Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-09-25T16:17:20.211243Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-09-25T16:17:20.211281Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-09-25T16:17:20.211288Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:79: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> TDatabaseResolverTests::DataStreams_Dedicated >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> test.py::test[blocks-mod_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2025-09-25T16:16:53.546566Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546573Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546575Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546577Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546582Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546587Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:69:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546588Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546590Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546592Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546594Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546716Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:619:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546718Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: 
PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546720Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546721Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546722Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546724Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546727Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546730Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546732Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546736Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546864Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546873Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546876Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546879Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546883Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546886Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546889Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546892Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546899Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.546903Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# 
[5000:1:862:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547027Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547030Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547033Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547035Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547037Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:711:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547039Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547041Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547043Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547045Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547047Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547149Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547153Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:31:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547155Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547157Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547161Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547163Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547165Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547167Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547169Z :BS_VDISK_PUT 
CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547172Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547286Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547288Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547290Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547294Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547296Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547298Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547300Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547302Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547304Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547306Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547409Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547412Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547414Z :BS_VDISK_ ... 
lob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547523Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547525Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547527Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547529Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547531Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547533Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547535Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547537Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547539Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547631Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547633Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:720:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547635Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547637Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547639Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547641Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547646Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547648Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547650Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547652Z 
:BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547735Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547738Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547741Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547745Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547747Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:229:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547753Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547755Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547757Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547759Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547761Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547849Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547852Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547855Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547858Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547860Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547863Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547865Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547869Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547872Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547875Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547972Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547974Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547977Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547979Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547980Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547983Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547985Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547987Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547989Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.547990Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:580:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548084Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548086Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548089Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548090Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548092Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548094Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 
2025-09-25T16:16:53.548098Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548100Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548101Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548103Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:546:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548215Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548227Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2025-09-25T16:16:53.548230Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:633: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-diffrerent_schemas--ForceBlocks] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-09-25T16:17:20.930568Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> test.py::test[insert-replace_ordered_by_key-default.txt-Results] [GOOD] >> test.py::test[insert-select_subquery--ForceBlocks] >> test.py::test[pg-select_limit-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-09-25T16:17:21.053924Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. 
>> test.py::test[insert-append--Results] [GOOD] >> test.py::test[join-alias_where_group--ForceBlocks] >> test.py::test[bigdate-table_arithmetic_narrow-default.txt-Results] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-09-25T16:17:21.606841Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-09-25T16:16:58.390328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061572627499265:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:58.390349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00518d/r3tmp/tmpHE0jra/pdisk_1.dat 2025-09-25T16:16:58.440705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:58.452867Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13250, node 1 2025-09-25T16:16:58.480109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:58.480121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:58.480123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:58.480176Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:16:58.491745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:58.491782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:58.493243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:58.509806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:63585 2025-09-25T16:16:58.676243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:58.858845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500143:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.858893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.859767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500155:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.859791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.862493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:16:58.897625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500312:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.897655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.897739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500316:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.897754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.899137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817018931 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817018931 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:16:58.927189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500409:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500415:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500416:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500429:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500437:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500438:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061572627500439:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:58.927848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:2 ... 0. Ctx: { TraceId: 01k60tmcjc4qpsjm397ycmkzq7, Database: , SessionId: ydb://session/3?node_id=1&id=MWUzODZmOS00YjM2NWIzMi05ZjY1M2U3My03Y2FjYzQxNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.925376Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748282. Ctx: { TraceId: 01k60tmcjc3y8ebpp10gsv6fm2, Database: , SessionId: ydb://session/3?node_id=1&id=Y2EwYzllZTktMjlmOWUyNWUtZGNhNGEyNWEtYzkxZGRmODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.926278Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748283. Ctx: { TraceId: 01k60tmcje2906zn16sfw0rnxk, Database: , SessionId: ydb://session/3?node_id=1&id=NWU2YmI1MS1jYTQ2ZDA3Zi05MGQzODEwYi00M2ZhMWZkNg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.926281Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748284. Ctx: { TraceId: 01k60tmcjec2c8p9ymgp1fqz8x, Database: , SessionId: ydb://session/3?node_id=1&id=Y2FlZTdhNDMtN2U5MGJkMTYtMjY1MGZhZGItYzFiZGRhNjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.927082Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748285. Ctx: { TraceId: 01k60tmcjeab25eftws0e3z6eb, Database: , SessionId: ydb://session/3?node_id=1&id=NDVmNjljNDgtYjIxYmQyZTgtN2RkNzQxYmEtYWJhOTI0Mw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.927360Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748286. Ctx: { TraceId: 01k60tmcjff684msz3e96t6a9a, Database: , SessionId: ydb://session/3?node_id=1&id=YTc1MzdkNTItM2M4MjZkOTMtOWM3MGE2MWYtZjRhNjY4OWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.927636Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748288. Ctx: { TraceId: 01k60tmcjf9298em0bve0mgq4h, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU3YWRkM2EtYTA3MzU5YzAtZWJhNmQ2MjktYmMyMzk0OGY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.927712Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748287. Ctx: { TraceId: 01k60tmcjffz4a61da73ne909b, Database: , SessionId: ydb://session/3?node_id=1&id=MjMyNjhlNTAtMjFhOGQ4OTMtYjQ2NjFlNzgtMTNmZTI2OWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.927872Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748289. Ctx: { TraceId: 01k60tmcjff2gxftj5nqr16v7w, Database: , SessionId: ydb://session/3?node_id=1&id=ZDBlZjcxYy0zOWQwOTQxYi00NzcxNDgtNGIwN2NiZQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.928564Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748290. Ctx: { TraceId: 01k60tmcjf28w6r0cqr2xg04yw, Database: , SessionId: ydb://session/3?node_id=1&id=ZWY3NTAwMDYtZWM1ZWM1NzgtMzJkYTkyZTEtNGI3OTRjODE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.928672Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748291. Ctx: { TraceId: 01k60tmcjf52ny2m6spmvd5ech, Database: , SessionId: ydb://session/3?node_id=1&id=MWUzODZmOS00YjM2NWIzMi05ZjY1M2U3My03Y2FjYzQxNw==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:18.928898Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748292. Ctx: { TraceId: 01k60tmcjf7b3dej1b4k61w9x4, Database: , SessionId: ydb://session/3?node_id=1&id=Y2EwYzllZTktMjlmOWUyNWUtZGNhNGEyNWEtYzkxZGRmODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.929917Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748293. Ctx: { TraceId: 01k60tmcjh0ym0hs3ssyz0bwhm, Database: , SessionId: ydb://session/3?node_id=1&id=NWU2YmI1MS1jYTQ2ZDA3Zi05MGQzODEwYi00M2ZhMWZkNg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.930133Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748296. Ctx: { TraceId: 01k60tmcjh99htkkm36kxmh2d4, Database: , SessionId: ydb://session/3?node_id=1&id=Y2FlZTdhNDMtN2U5MGJkMTYtMjY1MGZhZGItYzFiZGRhNjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.930144Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748294. Ctx: { TraceId: 01k60tmcjhasjrk5h1whq7kps4, Database: , SessionId: ydb://session/3?node_id=1&id=NDVmNjljNDgtYjIxYmQyZTgtN2RkNzQxYmEtYWJhOTI0Mw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.930310Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748295. Ctx: { TraceId: 01k60tmcjh74em1he11ra4g52j, Database: , SessionId: ydb://session/3?node_id=1&id=ZDBlZjcxYy0zOWQwOTQxYi00NzcxNDgtNGIwN2NiZQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.931147Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748297. Ctx: { TraceId: 01k60tmcjj56ss66g5szts3th8, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU3YWRkM2EtYTA3MzU5YzAtZWJhNmQ2MjktYmMyMzk0OGY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.931220Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748298. Ctx: { TraceId: 01k60tmcjj64by98c85cy628pt, Database: , SessionId: ydb://session/3?node_id=1&id=YTc1MzdkNTItM2M4MjZkOTMtOWM3MGE2MWYtZjRhNjY4OWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.931815Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748299. Ctx: { TraceId: 01k60tmcjjb84d09b1dcnj8d5n, Database: , SessionId: ydb://session/3?node_id=1&id=MjMyNjhlNTAtMjFhOGQ4OTMtYjQ2NjFlNzgtMTNmZTI2OWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.932121Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748300. Ctx: { TraceId: 01k60tmcjk2t5kagqrqc8kaefa, Database: , SessionId: ydb://session/3?node_id=1&id=ZWY3NTAwMDYtZWM1ZWM1NzgtMzJkYTkyZTEtNGI3OTRjODE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.932378Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748301. Ctx: { TraceId: 01k60tmcjkbmqdrqxebj75t7w3, Database: , SessionId: ydb://session/3?node_id=1&id=MWUzODZmOS00YjM2NWIzMi05ZjY1M2U3My03Y2FjYzQxNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.932620Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748302. Ctx: { TraceId: 01k60tmcjk7d667s5a9y0hqw0c, Database: , SessionId: ydb://session/3?node_id=1&id=Y2EwYzllZTktMjlmOWUyNWUtZGNhNGEyNWEtYzkxZGRmODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.933266Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748303. 
Ctx: { TraceId: 01k60tmcjm79dxa5y5ra7wdcnk, Database: , SessionId: ydb://session/3?node_id=1&id=NWU2YmI1MS1jYTQ2ZDA3Zi05MGQzODEwYi00M2ZhMWZkNg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.933284Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748304. Ctx: { TraceId: 01k60tmcjm3e7rgm3tbyfp35cn, Database: , SessionId: ydb://session/3?node_id=1&id=ZDBlZjcxYy0zOWQwOTQxYi00NzcxNDgtNGIwN2NiZQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.933451Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748305. Ctx: { TraceId: 01k60tmcjm0xnw8e65wt3xvgmc, Database: , SessionId: ydb://session/3?node_id=1&id=Y2FlZTdhNDMtN2U5MGJkMTYtMjY1MGZhZGItYzFiZGRhNjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.934720Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748308. Ctx: { TraceId: 01k60tmcjp77fxt02xjfy6w1tk, Database: , SessionId: ydb://session/3?node_id=1&id=YTc1MzdkNTItM2M4MjZkOTMtOWM3MGE2MWYtZjRhNjY4OWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.934767Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748307. Ctx: { TraceId: 01k60tmcjp2vwzr66yqt97mpsa, Database: , SessionId: ydb://session/3?node_id=1&id=NDVmNjljNDgtYjIxYmQyZTgtN2RkNzQxYmEtYWJhOTI0Mw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.934962Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748306. Ctx: { TraceId: 01k60tmcjncjkphex75q3y2psx, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU3YWRkM2EtYTA3MzU5YzAtZWJhNmQ2MjktYmMyMzk0OGY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.934963Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748309. Ctx: { TraceId: 01k60tmcjpbc5xt6mcc28frs0k, Database: , SessionId: ydb://session/3?node_id=1&id=Y2EwYzllZTktMjlmOWUyNWUtZGNhNGEyNWEtYzkxZGRmODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.935663Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748310. Ctx: { TraceId: 01k60tmcjqdhzhy0ns53tzzf4q, Database: , SessionId: ydb://session/3?node_id=1&id=ZWY3NTAwMDYtZWM1ZWM1NzgtMzJkYTkyZTEtNGI3OTRjODE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:18.935664Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748311. Ctx: { TraceId: 01k60tmcjqb01948s0n336s9b7, Database: , SessionId: ydb://session/3?node_id=1&id=MWUzODZmOS00YjM2NWIzMi05ZjY1M2U3My03Y2FjYzQxNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-09-25T16:17:18.935908Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748312. Ctx: { TraceId: 01k60tmcjq4s7yc4fbc94jhw6g, Database: , SessionId: ydb://session/3?node_id=1&id=MjMyNjhlNTAtMjFhOGQ4OTMtYjQ2NjFlNzgtMTNmZTI2OWE=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817018931 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817018931 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> test.py::test[join-join_without_column-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] >> test.py::test[tpch-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q10-default.txt-Results] >> TDatabaseResolverTests::DataStreams_Serverless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-09-25T16:16:55.608755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061559970473022:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:55.608774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00519e/r3tmp/tmpK4ROH5/pdisk_1.dat 2025-09-25T16:16:55.666656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:55.680396Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25098, node 1 2025-09-25T16:16:55.696147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:55.696158Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:55.696160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:55.696207Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15984 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:16:55.710404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:55.710446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-09-25T16:16:55.712033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:55.740364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:15984 2025-09-25T16:16:55.889230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:56.064816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061564265441199:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.064868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.064925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061564265441209:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.064931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.112387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:16:56.149897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061564265441366:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.149927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.149932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061564265441371:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.149992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061564265441373:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.150008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.150839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:16:56.155325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061564265441374:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:16:56.230489Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061564265441452:2794] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:16:56.254601Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tkpan3t73xze5x9yz5q3f, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.267743Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tkpe91yc2c0dwrj59dv59, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.272386Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k60tkpef0npd3f0x6edmr6dp, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.275825Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k60tkpekcf1trb7f7yapsrpd, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.279389Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k60tkpep6yap4cn593xr9kz4, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.282489Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715666. Ctx: { TraceId: 01k60tkpesafxcgfqxd1r4krqa, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.286022Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k60tkpex3er736gvdbjkrkc7, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.289816Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715668. Ctx: { TraceId: 01k60tkpf0caphcrp6hcd3cbpy, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.316996Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715669. Ctx: { TraceId: 01k60tkpfg3zwm2ja6cggxe39y, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.322150Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715670. 
Ctx: { TraceId: 01k60tkpg1adjffsqn8wcnasgv, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.327421Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715671. Ctx: { TraceId: 01k60tkpg646p70zm5z6rj4aa0, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:16:56.330961Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715672. Ctx: { TraceId: 01k60tkpga2ychqmyatdbkf8v4, Database: , SessionId: ydb://session/3?node_id=1&id=NDYxNDBjZjMtYjhlNGI0NGItM2Q3ODFjMC1hMDZkZDc2Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09 ... 5. Ctx: { TraceId: 01k60tmct21cm6czzx4shkw3tm, Database: , SessionId: ydb://session/3?node_id=1&id=MzYxZTRkMjctYzU1NDU5OTEtMjgyZDgxZmItNTNiNmM4Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.172729Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748727. Ctx: { TraceId: 01k60tmct369t9bs7kka6j093d, Database: , SessionId: ydb://session/3?node_id=1&id=ZmQ2N2IzOWEtNzEzOTdlNmEtOTEyM2NhZWMtNTIwMjNhYmI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.172736Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748728. Ctx: { TraceId: 01k60tmct3a431y7rn2zb5xdjx, Database: , SessionId: ydb://session/3?node_id=1&id=ZGIxNmU0ODctM2UwNjIyYTQtZDZhYzFlODItOGVhY2RlY2U=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.173365Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748730. Ctx: { TraceId: 01k60tmct42nyqqcfdej21e1ke, Database: , SessionId: ydb://session/3?node_id=1&id=NGRjNjFkMzQtYmRiY2M2YmYtNGQwZTE4ZDctZTA1NWQ4NzI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.173472Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748731. Ctx: { TraceId: 01k60tmct49tybafgc4rqac5cd, Database: , SessionId: ydb://session/3?node_id=1&id=NzlhMDQ0ZjgtNzY1MGViMzEtMzg3NzZlZmUtMzhlYTc4NGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.174021Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748729. Ctx: { TraceId: 01k60tmct4dacb5160mh87cddk, Database: , SessionId: ydb://session/3?node_id=1&id=YTlkMDU4MC1kYjk1MDVkLTU3NDkzMmY2LTc0ZWFlOTJk, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.174074Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748732. Ctx: { TraceId: 01k60tmct4b2zz625z8j67kw5f, Database: , SessionId: ydb://session/3?node_id=1&id=Y2U1NjAyNC0yMWJmNmExZC0xNDFhNWU5OC05ZWJiZGQ5NQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.174439Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748733. Ctx: { TraceId: 01k60tmct46dqpw6mtx26aetz2, Database: , SessionId: ydb://session/3?node_id=1&id=ZGExMjdmYTAtYzlhZjhhNWUtNGIyZTVlYTItZjJkYzNkZDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.180591Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748734. Ctx: { TraceId: 01k60tmctb7d7jj0hs7gy1gc7j, Database: , SessionId: ydb://session/3?node_id=1&id=MjIzMWFiZjgtZTBjYzJiZWEtNjQ3ZDllZjYtZTRiMGMxN2Y=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:19.180736Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748735. Ctx: { TraceId: 01k60tmctcdnyte9pqq9kk5s6d, Database: , SessionId: ydb://session/3?node_id=1&id=MzYxZTRkMjctYzU1NDU5OTEtMjgyZDgxZmItNTNiNmM4Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.180888Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748736. Ctx: { TraceId: 01k60tmctc4x9yypfd7cq7b4r4, Database: , SessionId: ydb://session/3?node_id=1&id=NzlhMDQ0ZjgtNzY1MGViMzEtMzg3NzZlZmUtMzhlYTc4NGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.180909Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748738. Ctx: { TraceId: 01k60tmctccecaqk0rnr8b2fmt, Database: , SessionId: ydb://session/3?node_id=1&id=NGRjNjFkMzQtYmRiY2M2YmYtNGQwZTE4ZDctZTA1NWQ4NzI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.181003Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748737. Ctx: { TraceId: 01k60tmctc801xrqg18awm82wz, Database: , SessionId: ydb://session/3?node_id=1&id=NzQzOTkxOC00MjdiOWQzYS1kM2JiNWE0ZC0yYWU4ODEwNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.181285Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748739. Ctx: { TraceId: 01k60tmctce2nqm3xg08sbew72, Database: , SessionId: ydb://session/3?node_id=1&id=YTlkMDU4MC1kYjk1MDVkLTU3NDkzMmY2LTc0ZWFlOTJk, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.183407Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748742. Ctx: { TraceId: 01k60tmctebjvcrsvhf7zxnfpz, Database: , SessionId: ydb://session/3?node_id=1&id=ZGExMjdmYTAtYzlhZjhhNWUtNGIyZTVlYTItZjJkYzNkZDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.183434Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748743. Ctx: { TraceId: 01k60tmctea8qzctgfmev3k6xp, Database: , SessionId: ydb://session/3?node_id=1&id=Y2U1NjAyNC0yMWJmNmExZC0xNDFhNWU5OC05ZWJiZGQ5NQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.183485Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748740. Ctx: { TraceId: 01k60tmctea9t563rcda8dype8, Database: , SessionId: ydb://session/3?node_id=1&id=ZmQ2N2IzOWEtNzEzOTdlNmEtOTEyM2NhZWMtNTIwMjNhYmI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.183526Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748741. Ctx: { TraceId: 01k60tmcte4mj60083s9339z8h, Database: , SessionId: ydb://session/3?node_id=1&id=ZGIxNmU0ODctM2UwNjIyYTQtZDZhYzFlODItOGVhY2RlY2U=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.185760Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748744. Ctx: { TraceId: 01k60tmcth0peqyfdvtm9v7mmb, Database: , SessionId: ydb://session/3?node_id=1&id=MzYxZTRkMjctYzU1NDU5OTEtMjgyZDgxZmItNTNiNmM4Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.185922Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748745. Ctx: { TraceId: 01k60tmcth015zq7zqmahcyx08, Database: , SessionId: ydb://session/3?node_id=1&id=NGRjNjFkMzQtYmRiY2M2YmYtNGQwZTE4ZDctZTA1NWQ4NzI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.186086Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748746. 
Ctx: { TraceId: 01k60tmcth7vwwwnc6ash6vjpj, Database: , SessionId: ydb://session/3?node_id=1&id=MjIzMWFiZjgtZTBjYzJiZWEtNjQ3ZDllZjYtZTRiMGMxN2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.186471Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748747. Ctx: { TraceId: 01k60tmcth9x7vweg16r90qvjn, Database: , SessionId: ydb://session/3?node_id=1&id=ZGIxNmU0ODctM2UwNjIyYTQtZDZhYzFlODItOGVhY2RlY2U=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.186849Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748748. Ctx: { TraceId: 01k60tmcthccxw3s05x6y81y0d, Database: , SessionId: ydb://session/3?node_id=1&id=ZGExMjdmYTAtYzlhZjhhNWUtNGIyZTVlYTItZjJkYzNkZDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.186855Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748749. Ctx: { TraceId: 01k60tmcthbjhcsey6b09z341m, Database: , SessionId: ydb://session/3?node_id=1&id=ZmQ2N2IzOWEtNzEzOTdlNmEtOTEyM2NhZWMtNTIwMjNhYmI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.187288Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748750. Ctx: { TraceId: 01k60tmcth3vyewx92wcxegbyq, Database: , SessionId: ydb://session/3?node_id=1&id=YTlkMDU4MC1kYjk1MDVkLTU3NDkzMmY2LTc0ZWFlOTJk, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.187391Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748751. Ctx: { TraceId: 01k60tmcthf2x57ngjgar3ew1k, Database: , SessionId: ydb://session/3?node_id=1&id=NzQzOTkxOC00MjdiOWQzYS1kM2JiNWE0ZC0yYWU4ODEwNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.187429Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748752. Ctx: { TraceId: 01k60tmcth6pjfkdhdwr808f6v, Database: , SessionId: ydb://session/3?node_id=1&id=Y2U1NjAyNC0yMWJmNmExZC0xNDFhNWU5OC05ZWJiZGQ5NQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-09-25T16:17:19.187818Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748753. Ctx: { TraceId: 01k60tmcthdncdf6y6dev6fpmj, Database: , SessionId: ydb://session/3?node_id=1&id=NzlhMDQ0ZjgtNzY1MGViMzEtMzg3NzZlZmUtMzhlYTc4NGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817016180 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:17:19.189255Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748754. Ctx: { TraceId: 01k60tmctk7rpekbwmbfmyat7z, Database: , SessionId: ydb://session/3?node_id=1&id=MjIzMWFiZjgtZTBjYzJiZWEtNjQ3ZDllZjYtZTRiMGMxN2Y=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:19.189599Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748755. Ctx: { TraceId: 01k60tmctk99qtsvz8k7deprkf, Database: , SessionId: ydb://session/3?node_id=1&id=NGRjNjFkMzQtYmRiY2M2YmYtNGQwZTE4ZDctZTA1NWQ4NzI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.189697Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748756. Ctx: { TraceId: 01k60tmctkd2jwe6pvksa1e48h, Database: , SessionId: ydb://session/3?node_id=1&id=MzYxZTRkMjctYzU1NDU5OTEtMjgyZDgxZmItNTNiNmM4Yg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:19.190415Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976748757. Ctx: { TraceId: 01k60tmctm63fn5afv7pc217tn, Database: , SessionId: ydb://session/3?node_id=1&id=ZGIxNmU0ODctM2UwNjIyYTQtZDZhYzFlODItOGVhY2RlY2U=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817016180 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> test.py::test[blocks-mod_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not--ForceBlocks] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> test.py::test[window-mixed/aggregations--Results] [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-09-25T16:17:22.184970Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-09-25T16:17:22.730547Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> test.py::test[pg-tpch-q07-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-09-25T16:17:23.075606Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-09-25T16:17:23.142451Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout >> test.py::test[epochs-write_and_use_in_same_epoch--ForceBlocks] [GOOD] >> test.py::test[epochs-write_and_use_in_same_epoch--Results] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |80.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[pg-select_limit-default.txt-ForceBlocks] [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-09-25T16:16:55.017224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061561975841760:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:55.017250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0051b2/r3tmp/tmpOyauYI/pdisk_1.dat 2025-09-25T16:16:55.062247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:55.074445Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25500, node 1 2025-09-25T16:16:55.085617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:55.085631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:55.085633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:55.085674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:55.118199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:55.118243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:55.119830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:16:55.133114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:16:55.139820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:21922 2025-09-25T16:16:55.291547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:55.458482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842637:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.458522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.458649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842647:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.458669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.491906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:16:55.523096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842807:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.523124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.523169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842809:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.523179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.526815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817015557 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817015557 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:16:55.539967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842914:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842933:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842935:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842938:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842939:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061561975842941:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:55.540107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource poo ... aceId: 01k60tme4s3cd6h6ks4g6ez965, Database: , SessionId: ydb://session/3?node_id=1&id=NjY1ZDY0ZDItYzg0ZGI2MjgtNGM2MmYzMmUtMTk3MWQwYWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.537802Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773427. Ctx: { TraceId: 01k60tme4sfeykf9eseqmxrha5, Database: , SessionId: ydb://session/3?node_id=1&id=NTQwN2IyMmItMjk1OTlmOTgtOGUxNTZkZTYtNjNhNDk0N2M=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.537994Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773429. Ctx: { TraceId: 01k60tme4s1vzv44dknmtm3r5d, Database: , SessionId: ydb://session/3?node_id=1&id=OTJjZDc5NGEtM2Q2NDRmZi1kYjBjNGZlYS1hNTRiOTY3MA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.538117Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773430. Ctx: { TraceId: 01k60tme4seah09pk5905p09dg, Database: , SessionId: ydb://session/3?node_id=1&id=YTU1OGE5NTItYjU0NWQwYWQtN2JjZTE2ZjAtNzdmMmRiMGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.538241Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773431. Ctx: { TraceId: 01k60tme4sbrtdpfkwm91zyb6g, Database: , SessionId: ydb://session/3?node_id=1&id=ZDc3NWVhY2QtMmY4MjAyNWEtY2I0MmUwMWMtZDhkN2RkN2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.538686Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773432. Ctx: { TraceId: 01k60tme4sfard5t7197js1cj2, Database: , SessionId: ydb://session/3?node_id=1&id=MTc2NDI3ZTItNDcwMGViNC03MDcwMmYyNC00Mzc0ZWMyZg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.538730Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773434. Ctx: { TraceId: 01k60tme4sfb8pvs4rh24fk4j2, Database: , SessionId: ydb://session/3?node_id=1&id=N2M1ZDU3OTEtM2I3YjgzMjEtNmViMmZmNWItYmQ4NzkyYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.538823Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773433. Ctx: { TraceId: 01k60tme4saztgdqm6a89xtn60, Database: , SessionId: ydb://session/3?node_id=1&id=MmFjNzllMTItNjA3NTI4ZjUtMjIwMjE2NjctYzU3MmYyNzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.539227Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773436. Ctx: { TraceId: 01k60tme4s5nwyvv7c1sj576c7, Database: , SessionId: ydb://session/3?node_id=1&id=YmVmNWI0Yy0xZDIxMjg3Yi0yNmYwYmJiZS05YjQ0NWI4MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.539457Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773435. Ctx: { TraceId: 01k60tme4s6ztydfkk4v8ekh8d, Database: , SessionId: ydb://session/3?node_id=1&id=ODA5ODI2NDQtZDE5ODk3YjItNzgzOTYyMGEtMzRiYWRjNGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.540847Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773437. Ctx: { TraceId: 01k60tme4v88kqjctr48q97sdd, Database: , SessionId: ydb://session/3?node_id=1&id=NTQwN2IyMmItMjk1OTlmOTgtOGUxNTZkZTYtNjNhNDk0N2M=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.540896Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773438. 
Ctx: { TraceId: 01k60tme4ve7vzza5m58kdwkf7, Database: , SessionId: ydb://session/3?node_id=1&id=NjY1ZDY0ZDItYzg0ZGI2MjgtNGM2MmYzMmUtMTk3MWQwYWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.541005Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773439. Ctx: { TraceId: 01k60tme4v3h8959avvr7y6kej, Database: , SessionId: ydb://session/3?node_id=1&id=MmFjNzllMTItNjA3NTI4ZjUtMjIwMjE2NjctYzU3MmYyNzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.541372Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773440. Ctx: { TraceId: 01k60tme4w51z9ss84g71tzykb, Database: , SessionId: ydb://session/3?node_id=1&id=OTJjZDc5NGEtM2Q2NDRmZi1kYjBjNGZlYS1hNTRiOTY3MA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.541495Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773442. Ctx: { TraceId: 01k60tme4xdkh6580mfwwdvd9g, Database: , SessionId: ydb://session/3?node_id=1&id=YTU1OGE5NTItYjU0NWQwYWQtN2JjZTE2ZjAtNzdmMmRiMGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.541960Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773441. Ctx: { TraceId: 01k60tme4w0qy9qsvpetjd3ph8, Database: , SessionId: ydb://session/3?node_id=1&id=N2M1ZDU3OTEtM2I3YjgzMjEtNmViMmZmNWItYmQ4NzkyYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.542642Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773445. Ctx: { TraceId: 01k60tme4xe2e1q7epy2ma8tsd, Database: , SessionId: ydb://session/3?node_id=1&id=ODA5ODI2NDQtZDE5ODk3YjItNzgzOTYyMGEtMzRiYWRjNGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.542712Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773443. Ctx: { TraceId: 01k60tme4xfeg4jqkb3ceweet8, Database: , SessionId: ydb://session/3?node_id=1&id=YmVmNWI0Yy0xZDIxMjg3Yi0yNmYwYmJiZS05YjQ0NWI4MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.542859Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773444. Ctx: { TraceId: 01k60tme4x60c07bw8jg0tayhs, Database: , SessionId: ydb://session/3?node_id=1&id=MTc2NDI3ZTItNDcwMGViNC03MDcwMmYyNC00Mzc0ZWMyZg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.543780Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773449. Ctx: { TraceId: 01k60tme4yb0qnqyfebm4dpnrw, Database: , SessionId: ydb://session/3?node_id=1&id=NTQwN2IyMmItMjk1OTlmOTgtOGUxNTZkZTYtNjNhNDk0N2M=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.543828Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773447. Ctx: { TraceId: 01k60tme4z7g8h88da2jrhpcap, Database: , SessionId: ydb://session/3?node_id=1&id=OTJjZDc5NGEtM2Q2NDRmZi1kYjBjNGZlYS1hNTRiOTY3MA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.543928Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773450. Ctx: { TraceId: 01k60tme4y85cyme4m147bh7fk, Database: , SessionId: ydb://session/3?node_id=1&id=MmFjNzllMTItNjA3NTI4ZjUtMjIwMjE2NjctYzU3MmYyNzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.543955Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773446. 
Ctx: { TraceId: 01k60tme4ydtg23xq8kssjb4ft, Database: , SessionId: ydb://session/3?node_id=1&id=ZDc3NWVhY2QtMmY4MjAyNWEtY2I0MmUwMWMtZDhkN2RkN2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.544055Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773452. Ctx: { TraceId: 01k60tme4zdjgjw72sb4t9jhja, Database: , SessionId: ydb://session/3?node_id=1&id=YTU1OGE5NTItYjU0NWQwYWQtN2JjZTE2ZjAtNzdmMmRiMGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.544137Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773448. Ctx: { TraceId: 01k60tme4z1jbwq6kwt4dvqafa, Database: , SessionId: ydb://session/3?node_id=1&id=N2M1ZDU3OTEtM2I3YjgzMjEtNmViMmZmNWItYmQ4NzkyYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.544230Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773451. Ctx: { TraceId: 01k60tme4yawvq3vqjtqmqc6wm, Database: , SessionId: ydb://session/3?node_id=1&id=NjY1ZDY0ZDItYzg0ZGI2MjgtNGM2MmYzMmUtMTk3MWQwYWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.545989Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773454. Ctx: { TraceId: 01k60tme4zckt3hcw3ejve7c3d, Database: , SessionId: ydb://session/3?node_id=1&id=ODA5ODI2NDQtZDE5ODk3YjItNzgzOTYyMGEtMzRiYWRjNGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.546100Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773453. Ctx: { TraceId: 01k60tme4zbdxpzj2bbvntvngs, Database: , SessionId: ydb://session/3?node_id=1&id=YmVmNWI0Yy0xZDIxMjg3Yi0yNmYwYmJiZS05YjQ0NWI4MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.547248Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773455. Ctx: { TraceId: 01k60tme5205qyvdagxkekabah, Database: , SessionId: ydb://session/3?node_id=1&id=MTc2NDI3ZTItNDcwMGViNC03MDcwMmYyNC00Mzc0ZWMyZg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:20.547366Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773456. Ctx: { TraceId: 01k60tme532beepy8snsvkxskj, Database: , SessionId: ydb://session/3?node_id=1&id=ZDc3NWVhY2QtMmY4MjAyNWEtY2I0MmUwMWMtZDhkN2RkN2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817015557 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:17:20.561437Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773457. Ctx: { TraceId: 01k60tme5d6jhtdv1dprnwsvnq, Database: , SessionId: ydb://session/3?node_id=1&id=OTJjZDc5NGEtM2Q2NDRmZi1kYjBjNGZlYS1hNTRiOTY3MA==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:20.561479Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976773458. Ctx: { TraceId: 01k60tme5d024vzcjvsyarmbpv, Database: , SessionId: ydb://session/3?node_id=1&id=MmFjNzllMTItNjA3NTI4ZjUtMjIwMjE2NjctYzU3MmYyNzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817015557 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-map-dynamic-Results] >> PartitionStats::Collector [GOOD] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2025-09-25T16:16:55.992264Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:3027} PDiskId# 1 ownerId# 3 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 3 ownerRound# 101 lsn# 12 PDiskId# 1 2025-09-25T16:16:57.579781Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:3027} PDiskId# 1 ownerId# 3 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 3 ownerRound# 101 lsn# 11 PDiskId# 1 |80.8%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |80.8%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> test.py::test[pg-tpch-q20-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--ForceBlocks] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> test.py::test[join-full_equal_null--ForceBlocks] [GOOD] >> test.py::test[join-full_equal_null--Results] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10000Inflight100BlobSize1000 [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-09-25T16:17:25.732854Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-09-25T16:17:25.732893Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-09-25T16:17:25.732901Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:123: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-09-25T16:17:25.732907Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:105: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-09-25T16:17:25.758556Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:64: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-09-25T16:17:25.758596Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:90: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-09-25T16:17:25.758605Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:146: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[epochs-write_and_use_in_same_epoch--Results] [GOOD] >> test.py::test[join-left_semi_with_other-off-ForceBlocks] 
[GOOD] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] >> TBtreeIndexTPartLarge::History [GOOD] >> TFlatTableLongTxLarge::LargeDeltaChain >> test.py::test[window-win_func_into_udf--ForceBlocks] [GOOD] >> test.py::test[window-win_func_into_udf--Results] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[join-yql-8125--ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Results] >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsPatchBlock4Plus2::TestPatch4Plus2BlockRequests10000Inflight100BlobSize1000 [GOOD] Test command err: RandomSeed# 14473602425688416317 2025-09-25T16:15:41.630158Z 1 00h00m00.010512s :BS_LOCALRECOVERY CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# false EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {[SyncerState 10][HugeBlobEntryPoint 1]} ReadLogReplies# {}} reason# Entry point for Syncer check failed, ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } 
StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } } status# ERROR;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-09-25T16:15:41.644222Z 1 00h00m30.000512s :BS_PROXY_PUT ERROR: [a0b23afca778983e] Result# TEvPutResult {Id# [1:1:1:1:3:4:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:1:1:3:4:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: VDISK_ERROR_STATE status in response", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-09-25T16:17:06.436177Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [686c2c910a6f89f7] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:40470:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.436250Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:40470:1000:0] PatchedBlobId# [1:1:2:10:19990:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.438242Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [9a59e6e1493c4e3d] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:2:10:19990:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.438287Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:2:10:19990:1000:0] PatchedBlobId# [1:1:3:10:19990:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.439626Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [55a77a1f443beb5f] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:3:10:19990:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.439661Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:3:10:19990:1000:0] PatchedBlobId# [1:1:4:10:24086:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.440898Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [9df093a742516ef3] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:4:10:24086:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.440927Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:4:10:24086:1000:0] PatchedBlobId# [1:1:5:10:122390:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# 
DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.442236Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [838f9af0db7d688b] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:5:10:122390:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.442274Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:5:10:122390:1000:0] PatchedBlobId# [1:1:6:10:3606:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.443461Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [a9d649d41d15c84e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:6:10:3606:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.443487Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:6:10:3606:1000:0] PatchedBlobId# [1:1:7:10:7702:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.444254Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [9e87c9d0a2553528] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:7:10:7702:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.444279Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:7:10:7702:1000:0] PatchedBlobId# [1:1:8:10:56854:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.445333Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [488dd310d59bfde6] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:8:10:56854:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.445363Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:8:10:56854:1000:0] PatchedBlobId# [1:1:9:10:11798:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.446533Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [3b9bef9c8b02d39a] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:9:10:11798:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.446556Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:9:10:11798:1000:0] PatchedBlobId# [1:1:10:10:15894:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.447668Z 2 00h00m30.003048s :BS_PROXY_GET 
ERROR: [3262300a4490b97a] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:10:10:15894:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.447698Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:10:10:15894:1000:0] PatchedBlobId# [1:1:11:10:15894:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.448797Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [3f503dd7783e1916] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:11:10:15894:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.448842Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:11:10:15894:1000:0] PatchedBlobId# [1:1:12:10:19990:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.449731Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [10cbc9f89cda063e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:12:10:19990:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:06.449752Z 2 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:12:10:19990:1000:0] PatchedBlobId# [1:1:13:10:93718:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:06.450725Z 2 00h00m30.003048s :BS_PROXY_GET ERROR: [5bc3ccbd4cc814ef] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:13:10:93718:1000 ... 
EADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.283856Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [7ee0f8a745357d37] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:24171:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.284079Z 1 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:92:10:48695:1000:0] PatchedBlobId# [1:1:93:10:3639:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.284577Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:24171:1000:0] PatchedBlobId# [1:1:74:10:3691:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2025-09-25T16:17:23.287810Z 7 00h00m30.003048s :BS_PROXY_GET ERROR: [0f683175fe6b270c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:93:10:3687:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.288260Z 7 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:93:10:3687:1000:0] PatchedBlobId# [1:1:94:10:28263:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.289830Z 6 00h00m30.003048s :BS_PROXY_GET ERROR: [b87375002aa379d9] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:101:10:167474:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.290425Z 1 00h00m30.003048s :BS_PROXY_GET ERROR: [05f101f5445b0033] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:93:10:3704:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.290796Z 6 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:101:10:167474:1000:0] PatchedBlobId# [1:1:102:10:24114:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.291144Z 1 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:93:10:3704:1000:0] PatchedBlobId# [1:1:94:10:3704:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.291928Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [2e4fac57059fb35e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:3635:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.292428Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: 
(2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:3635:1000:0] PatchedBlobId# [1:1:74:10:77363:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2025-09-25T16:17:23.294608Z 7 00h00m30.003048s :BS_PROXY_GET ERROR: [57faf68cbbe9c626] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:93:10:3702:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.295008Z 7 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:93:10:3702:1000:0] PatchedBlobId# [1:1:94:10:7798:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.295451Z 1 00h00m30.003048s :BS_PROXY_GET ERROR: [d18dda3e0f38ce9c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:92:10:3648:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.296053Z 1 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:92:10:3648:1000:0] PatchedBlobId# [1:1:93:10:3648:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.296262Z 6 00h00m30.003048s :BS_PROXY_GET ERROR: [d12fbda338fe3bcb] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:101:10:118319:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.296763Z 6 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:101:10:118319:1000:0] PatchedBlobId# [1:1:102:10:24111:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.297870Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [b6a844bcbd0b2d67] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:24174:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.298575Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:24174:1000:0] PatchedBlobId# [1:1:74:10:3694:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2025-09-25T16:17:23.301130Z 1 00h00m30.003048s :BS_PROXY_GET ERROR: [4b07896acc2af2dc] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:91:10:24196:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.301312Z 7 00h00m30.003048s :BS_PROXY_GET ERROR: [2e75a243c610c994] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:94:10:7801:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} 
Marker# BPG29 2025-09-25T16:17:23.301660Z 1 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:91:10:24196:1000:0] PatchedBlobId# [1:1:92:10:48772:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.301735Z 7 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:94:10:7801:1000:0] PatchedBlobId# [1:1:95:10:81529:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.303356Z 6 00h00m30.003048s :BS_PROXY_GET ERROR: [85e215d069a612f6] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:101:10:93820:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.303968Z 6 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:101:10:93820:1000:0] PatchedBlobId# [1:1:102:10:24188:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.305454Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [8d3d3d51bfd87ce9] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:3712:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.305999Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:3712:1000:0] PatchedBlobId# [1:1:74:10:52864:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2025-09-25T16:17:23.307259Z 1 00h00m30.003048s :BS_PROXY_GET ERROR: [29f5e1a878a5c141] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:92:10:24181:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:0:0]"} Marker# BPG29 2025-09-25T16:17:23.307956Z 1 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:92:10:24181:1000:0] PatchedBlobId# [1:1:93:10:3701:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:0:0] Marker# BSVSP01 2025-09-25T16:17:23.311380Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [f753aab5b52e5c34] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:48753:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.311825Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:48753:1000:0] PatchedBlobId# [1:1:74:10:3697:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 
2025-09-25T16:17:23.314893Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [5cc7c13968ce5a95] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:122487:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.315103Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:122487:1000:0] PatchedBlobId# [1:1:74:10:3703:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 2025-09-25T16:17:23.317480Z 4 00h00m30.003048s :BS_PROXY_GET ERROR: [bb894498fabb2fd5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:73:10:3706:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:0:1:0]"} Marker# BPG29 2025-09-25T16:17:23.317648Z 4 00h00m30.003048s :BS_VDISK_PATCH ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:73:10:3706:1000:0] PatchedBlobId# [1:1:74:10:3706:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:0:1:0] Marker# BSVSP01 |80.9%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[schema-diffrerent_schemas--ForceBlocks] [GOOD] >> test.py::test[blocks-date_group_by--ForceBlocks] [GOOD] >> test.py::test[blocks-date_group_by--Results] >> test.py::test[schema-diffrerent_schemas--Results] |80.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[blocks-not--ForceBlocks] [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[blocks-not--Results] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> ExternalBlobsMultipleChannels::Simple |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-09-25T16:17:04.592948Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061599999936488:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:04.592965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:04.616987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005184/r3tmp/tmpTdRQkA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12619, node 1 2025-09-25T16:17:04.697543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:04.697580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:04.703473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:04.707221Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:04.707741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:04.707925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:04.707927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:04.707929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:04.707983Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:04.740762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:04.748002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:9503 2025-09-25T16:17:05.069323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904663:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.069349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.069506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904673:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.069514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.111326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:05.147408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904833:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.147434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.147529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904835:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.147541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.151831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817025182 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817025182 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:17:05.173522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904944:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904975:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904976:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904977:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904978:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904979:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:05.173634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061604294904986:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { > ExternalBlobsMultipleChannels::SingleChannel >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[insert-select_subquery--ForceBlocks] [GOOD] >> test.py::test[insert-select_subquery--Results] |80.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> test.py::test[join-full_equal_null--Results] [GOOD] >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> test.py::test[join-alias_where_group--ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group--Results] >> ExternalBlobsMultipleChannels::WithCompaction >> test.py::test[binding-table_regexp_strict_binding--ForceBlocks] [GOOD] >> test.py::test[binding-table_regexp_strict_binding--Results] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> test.py::test[schema-diffrerent_schemas--Results] [GOOD] >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] >> ExternalBlobsMultipleChannels::ChangeExternalCount >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] >> test.py::test[blocks-not--Results] [GOOD] >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[tpch-q10-default.txt-Results] [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/metadata/initializer/ut/unittest >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> test.py::test[window-win_func_aggr_hist--ForceBlocks] |80.9%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> test.py::test[blocks-date_group_by--Results] [GOOD] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] |80.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> test.py::test[window-win_func_order_by_udf_empty_rank--ForceBlocks] [GOOD] >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> ExternalBlobsMultipleChannels::Simple [GOOD] >> test.py::test[insert-select_subquery--Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: 2025-09-25T16:17:30.070468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:30.104886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:30.107483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:30.107587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:30.107614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001ac2/r3tmp/tmpfgMzUv/pdisk_1.dat 2025-09-25T16:17:30.174868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:30.174911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:30.189092Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:30.190161Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817049592304 != 1758817049592308 2025-09-25T16:17:30.223008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:30.274346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:30.307957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:30.396212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:30.633797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:17:30.734603Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:30.881670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-09-25T16:17:28.792302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:28.830801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:28.833339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:28.833423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:28.833448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c85/r3tmp/tmpPeIDuI/pdisk_1.dat 2025-09-25T16:17:28.904414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:28.904459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:28.919131Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:28.920233Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817048311119 != 1758817048311123 2025-09-25T16:17:28.954915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:29.010702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:29.045492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:29.139769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:29.347376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.347416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.347518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.347688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.347801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.348998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:29.399544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:29.495148Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:29.536331Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:834:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-09-25T16:17:28.880250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:28.919044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:28.928125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:28.928230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:28.928262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001cb9/r3tmp/tmpnTb5Df/pdisk_1.dat 2025-09-25T16:17:29.014615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:29.014660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:29.027946Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:29.028999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817048334038 != 1758817048334042 2025-09-25T16:17:29.060406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:29.120959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:29.165618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:29.241424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:29.461275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:746:2615], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.461312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.461392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.461558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:762:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.461666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.462674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:29.505723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:29.603394Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:760:2623], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:29.650880Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:832:2664] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> test.py::test[pg_catalog-lambda--ForceBlocks] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] >> test.py::test[binding-table_regexp_strict_binding--Results] [GOOD] >> test.py::test[blocks-add_int8--ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-09-25T16:17:29.501344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:29.535056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:29.537729Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:29.537802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:29.537824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001b78/r3tmp/tmpwyuR8c/pdisk_1.dat 2025-09-25T16:17:29.608022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:29.608055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:29.619693Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:29.620466Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817049087935 != 1758817049087939 2025-09-25T16:17:29.651774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:29.706100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:29.753293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:29.828259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:30.073198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:17:30.163660Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:30.304367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:816:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.304401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:826:2667], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.304413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.304624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:831:2671], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.304643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.305739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:30.434635Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:830:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:30.478006Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:888:2709] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-09-25T16:17:29.351640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:29.396088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:29.399274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:29.399398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:29.399428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c07/r3tmp/tmpv4MqCD/pdisk_1.dat 2025-09-25T16:17:29.469189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:29.469230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:29.481524Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:29.482645Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817048880784 != 1758817048880788 2025-09-25T16:17:29.513855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:29.574041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:29.608812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:29.705358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:29.917006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.917047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.917119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.917297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.917398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:29.918313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:29.961017Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:30.057221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:30.098220Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:834:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |81.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |81.0%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |81.0%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> test.py::test[join-alias_where_group--Results] [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::DynamicConfig >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-09-25T16:17:29.632142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:29.668248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:29.671865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:29.671963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:29.671990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001af4/r3tmp/tmpndCTgD/pdisk_1.dat 2025-09-25T16:17:29.756408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:29.756444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:29.767755Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:29.768590Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817049110015 != 1758817049110019 2025-09-25T16:17:29.799676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:29.853254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:29.899539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:29.973021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:30.179235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.179274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.179384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.179558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.179723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:30.180710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:30.225450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:30.324060Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:30.367402Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:834:2665] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:33.021969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> KqpPg::TypeCoercionInsert-useSink >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] >> KqpPg::NoTableQuery+useSink >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> test.py::test[window-win_func_order_by_udf_empty_rank--Results] [GOOD] >> test.py::test[window-win_with_cur_row--ForceBlocks] >> KqpPg::InsertFromSelect_Simple+useSink >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--Results] >> KqpPg::InsertNoTargetColumns_Simple+useSink |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> KqpPg::EmptyQuery+useSink >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[join-inner_trivial_from_concat--ForceBlocks] [GOOD] >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> test.py::test[schema-row_spec_with_default_values--ForceBlocks] [GOOD] >> test.py::test[schema-row_spec_with_default_values--Results] >> test.py::test[join-inner_trivial_from_concat--Results] >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |81.0%| [TA] 
$(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> GenericFederatedQuery::IcebergHadoopTokenSelectAll |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |81.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[blocks-top_sort_two_mix--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> TGRpcYdbTest::RemoveNotExistedDirectory >> test.py::test[in-in_noansi_join--Results] [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] >> KqpPg::DuplicatedColumns+useSink [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] [GOOD] >> KqpPg::DuplicatedColumns-useSink >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] >> test.py::test[window-yql-14479-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> test.py::test[join-inner_trivial_from_concat--Results] [GOOD] >> test.py::test[blocks-add_int8--ForceBlocks] [GOOD] >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::CreateTime >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] >> test.py::test[blocks-add_int8--Results] >> test.py::test[insert-select_with_sort_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] |81.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::SdkUuid >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::TestDrainAction >> TAccessServiceTest::Authenticate >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> 
test.py::test[aggr_factory-count_if-default.txt-Results] >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp >> FolderServiceTest::TFolderServiceAdapter >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> TAccessServiceTest::Authenticate [GOOD] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> test.py::test[blocks-add_int8--Results] [GOOD] >> test.py::test[blocks-block_input--ForceBlocks] >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> test.py::test[produce-process_row_and_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_row_and_columns-default.txt-Results] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> test.py::test[blocks-block_input--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input--Results] >> TYardTest::TestLogWriteCutUnequal [GOOD] >> TYardTest::TestLogMultipleWriteRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-09-25T16:17:38.864008Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061745357840405:2257];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:38.864051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056bf/r3tmp/tmpf7GWkU/pdisk_1.dat 2025-09-25T16:17:38.912886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:38.924206Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:38.929059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061745357840167:2081] 1758817058861280 != 1758817058861283 TClient is connected to server localhost:28352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:38.955460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:38.957181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:38.966096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:38.966134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:38.967193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:39.017080Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [131c3fabb820] Connect to grpc://localhost:11902 2025-09-25T16:17:39.017516Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [131c3fabb820] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-09-25T16:17:39.026810Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [131c3fabb820] Status 7 Permission Denied 2025-09-25T16:17:39.027085Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [131c3fabb820] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-09-25T16:17:39.027844Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [131c3fabb820] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink >> test.py::test[blocks-block_input--Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--ForceBlocks] >> TMaintenanceApiTest::CreateTime [GOOD] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[schema-row_spec_with_default_values--Results] [GOOD] >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogContinuityPersistence >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-09-25T16:17:39.295057Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061750237852943:2140];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:39.295087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:39.298922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got 
undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056c9/r3tmp/tmpSZwPMP/pdisk_1.dat 2025-09-25T16:17:39.329797Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:39.329920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061750237852840:2081] 1758817059294210 != 1758817059294213 TClient is connected to server localhost:61293 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:39.380068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:39.389125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:39.399828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:39.399858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:39.401042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:39.429788Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [31f17faae720] Connect to grpc://localhost:11809 2025-09-25T16:17:39.430018Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [31f17faae720] Request ListFoldersRequest { id: "i_am_exists" } 2025-09-25T16:17:39.440154Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [31f17faae720] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-09-25T16:17:39.440544Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [31f17fab9720] Connect to grpc://localhost:14824 2025-09-25T16:17:39.440724Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [31f17fab9720] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-09-25T16:17:39.442808Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [31f17fab9720] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-09-25T16:17:39.442996Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [31f17fab9720] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-09-25T16:17:39.443543Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [31f17fab9720] Status 5 Not Found 2025-09-25T16:17:39.443683Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [31f17faae720] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-09-25T16:17:39.444317Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [31f17faae720] Status 5 Not Found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::CreateTime [GOOD] Test command err: 2025-09-25T16:17:34.233984Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:34.235244Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:34.241102Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:34.241580Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:34.241655Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:17:34.241764Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:34.242229Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:34.242378Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:34.243243Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:34.243670Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:34.243775Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:34.245474Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:34.245504Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:34.245560Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:34.245594Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:34.267011Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:34.310385Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:34.310492Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.311810Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.311919Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:34.311926Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:34.311935Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:34.311939Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:34.311949Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:34.311966Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:34.313949Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { 
NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { 
NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-09-25T16:17:34.393457Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.405649Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.405718Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:17:34.448299Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:17:34.448355Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:17:34.448444Z node 1 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:17:34.449090Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } 
Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: " ... InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 125610512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 125610512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 125610512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 125610512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP 
Timestamp: 125610512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 125610512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 125610512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 125610512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 125610512 } Timestamp: 125610512 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 125610512 } } 2025-09-25T16:17:38.277487Z node 17 :CMS INFO: cms.cpp:364: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" 2025-09-25T16:17:38.277496Z node 17 :CMS DEBUG: cms.cpp:396: Checking action: Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 2025-09-25T16:17:38.277507Z node 17 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-09-25T16:17:38.277552Z node 17 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Ok 2025-09-25T16:17:38.277556Z node 17 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Ok 2025-09-25T16:17:38.277559Z node 17 :CMS DEBUG: cms.cpp:759: Ring: 2; State: Ok 2025-09-25T16:17:38.277563Z node 17 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:17:38.277576Z node 17 :CMS DEBUG: cms.cpp:396: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 2025-09-25T16:17:38.277581Z node 17 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-09-25T16:17:38.277600Z node 17 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: ) 2025-09-25T16:17:38.277621Z node 17 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2025-09-25T16:17:38.277629Z node 17 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (17) (permission test-user-p-1 until 1970-01-01T00:12:05Z) 2025-09-25T16:17:38.277642Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:17:38.277694Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:12:05.610512Z, action# Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 2025-09-25T16:17:38.277724Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# test-user-r-1, owner# test-user, order# 1, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:17:38.308238Z node 17 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:38.380370Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:17:38.380482Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Deadline: 725610512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-09-25T16:17:38.380494Z node 17 :CMS DEBUG: cms.cpp:1092: Schedule cleanup at 1970-01-01T00:32:05.610512Z 2025-09-25T16:17:38.380709Z node 17 :CMS INFO: cms.cpp:1401: Get selected requests for test-user 2025-09-25T16:17:38.380721Z node 17 :CMS DEBUG: cms.cpp:1427: Resulting status: OK 2025-09-25T16:17:38.380757Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "test-user" Command: GET RequestId: "test-user-r-1" }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "test-user-r-1" Owner: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-09-25T16:17:38.477321Z node 17 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (17) (permission test-user-p-1 until 1970-01-01T00:12:05Z) 2025-09-25T16:17:38.477435Z node 17 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:17:38.477458Z node 17 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:17:38.477481Z node 17 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:11Z 2025-09-25T16:17:38.477647Z node 17 :CMS INFO: cms.cpp:364: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:17:38.477660Z node 17 :CMS DEBUG: cms.cpp:396: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } 2025-09-25T16:17:38.477671Z node 17 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-09-25T16:17:38.477701Z node 17 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: ) 2025-09-25T16:17:38.477724Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:17:38.477782Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# test-user-r-1, owner# test-user, order# 1, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:17:38.491481Z node 17 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:17:38.491578Z node 17 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "test-user" RequestId: "test-user-r-1" AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/18/pdisk-18.data) is locked by this request. 
Down: " } RequestId: "test-user-r-1" Deadline: 431212024 } |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> TMaintenanceApiTest::TestDrainAction [GOOD] >> TMaintenanceApiTest::TestCordonAction >> test.py::test[tpch-q20-default.txt-Results] [GOOD] |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink >> test.py::test[tpch-q21-default.txt-Results] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] >> TYardTest::TestLogContinuityPersistence [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TGRpcYdbTest::ReadTable >> test.py::test[produce-process_row_and_columns-default.txt-Results] [GOOD] >> test.py::test[produce-process_rows_sorted_multi_out--ForceBlocks] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [25/Sep/2025 16:16:54] "GET /mylib.sql HTTP/1.1" 200 - >> test.py::test[window-win_with_cur_row--ForceBlocks] [GOOD] >> test.py::test[produce-process_rows_sorted_multi_out--ForceBlocks] [SKIPPED] >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[window-win_with_cur_row--Results] |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-09-25T16:17:40.654116Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061755563768642:2141];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:40.654252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056b5/r3tmp/tmpiaATH9/pdisk_1.dat 2025-09-25T16:17:40.698800Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:40.708968Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:40.712899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061755563768534:2081] 1758817060653264 != 1758817060653267 TClient is connected to server localhost:8155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:40.757187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:40.757221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:40.758387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:40.767928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:41.253145Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061760260389839:2075];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:41.253174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056b5/r3tmp/tmpBhewcl/pdisk_1.dat 2025-09-25T16:17:41.256298Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:41.280751Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:12990 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:41.314378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:41.316915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:41.357109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:41.357163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:41.359146Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected >> TUserAccountServiceTest::Get >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> TMaintenanceApiTest::TestCordonAction [GOOD] >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::ReadTablePg >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> FolderServiceTest::TFolderServiceTransitional >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestLogWriteLsnConsistency >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> FolderServiceTest::TFolderService >> TUserAccountServiceTest::Get [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_list_dict-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] Test command err: 2025-09-25T16:17:34.308990Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:34.309389Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:34.316229Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:34.316353Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:34.316818Z node 1 
:CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:34.316913Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:34.317033Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:34.325592Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:34.325667Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:34.325733Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-09-25T16:17:34.325841Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:34.327600Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:34.327639Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:34.327690Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:34.327722Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:34.352472Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:34.402638Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:34.402766Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.404670Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.404844Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:34.404854Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:34.404865Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:34.404871Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:34.404884Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:34.404906Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:34.408073Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 9 Path: "/9/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 10 Path: "/10/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 11 Path: 
"/11/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 12 Path: "/12/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 13 Path: "/13/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 14 Path: "/14/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 15 Path: "/15/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 16 Path: "/16/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } 
VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 
VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 100 ... VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } VSlotId { 
NodeId: 33 PDiskId: 33 VSlotId: 1000 } VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1000 } VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1001 } VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1001 } VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1002 } VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1002 } VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } VSlotId { NodeId: 33 PDiskId: 33 VSlotId: 1003 } VSlotId { NodeId: 34 PDiskId: 34 VSlotId: 1003 } VSlotId { NodeId: 35 PDiskId: 35 VSlotId: 1003 } } } } Success: true 2025-09-25T16:17:40.971834Z node 28 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:40.984732Z node 28 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:40.984815Z node 28 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:17:41.007546Z node 28 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:17:41.007586Z node 28 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:17:41.007611Z node 28 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:17:41.008002Z node 28 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-34-34" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 34 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-35-35" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 
35 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } 
Timestamp: 120110512 } } 2025-09-25T16:17:41.008080Z node 28 :CMS INFO: cms.cpp:364: Check request: User: "test-user" Actions { Type: CORDON_NODE Host: "29" } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" 2025-09-25T16:17:41.008090Z node 28 :CMS DEBUG: cms.cpp:396: Checking action: Type: CORDON_NODE Host: "29" 2025-09-25T16:17:41.008096Z node 28 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:17:41.008102Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2025-09-25T16:17:41.008119Z node 28 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2025-09-25T16:17:41.008124Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2025-09-25T16:17:41.008136Z node 28 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:17:41.008189Z node 28 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:07:00.110512Z, action# Type: CORDON_NODE Host: "29" 2025-09-25T16:17:41.008199Z node 28 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# test-user-r-1, owner# test-user 2025-09-25T16:17:41.039566Z node 28 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:41.124778Z node 28 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:17:41.124896Z node 28 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: CORDON_NODE Host: "29" } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: CORDON_NODE Host: "29" } Deadline: 420110512 } } 2025-09-25T16:17:41.124910Z node 28 :CMS DEBUG: cms.cpp:1092: Schedule cleanup at 1970-01-01T00:07:00.110512Z >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency >> TAccessServiceTest::PassRequestId |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest |81.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt-Results] >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder >> test.py::test[pg-tpch-q10-default.txt-ForceBlocks] [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-09-25T16:17:42.539577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061761972348274:2137];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:42.539634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:42.543559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056b0/r3tmp/tmpZuV3Pq/pdisk_1.dat 2025-09-25T16:17:42.568214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:42.568246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:42.569121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:42.585431Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:42.588440Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061761972348175:2081] 1758817062538860 != 1758817062538863 TClient is connected to server localhost:24128 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:42.622854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:42.632695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
|81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-ForceBlocks] >> test.py::test[join-inner_trivial_from_concat-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial_from_concat-off-Results] [SKIPPED] >> test.py::test[join-join_without_column--ForceBlocks] |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TGRpcYdbTest::ExecuteQueryImplicitSession >> TAccessServiceTest::PassRequestId [GOOD] >> TServiceAccountServiceTest::Get [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> TYardTest::TestLogLatency >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD] >> TPDiskRaces::OwnerRecreationRaces ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-09-25T16:17:43.627343Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061769362805247:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:43.627396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00569d/r3tmp/tmpOPO676/pdisk_1.dat 2025-09-25T16:17:43.672839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:43.685046Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061769362805120:2081] 1758817063625026 != 1758817063625029 2025-09-25T16:17:43.694027Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:19767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:43.736504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:43.736558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:43.737309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:43.738078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:43.742950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:43.775901Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [1322bfabd0e0]{trololo} Connect to grpc://localhost:11089 2025-09-25T16:17:43.776410Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [1322bfabd0e0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-09-25T16:17:43.779806Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [1322bfabd0e0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> FolderServiceTest::TFolderService [GOOD] >> test.py::test[blocks-date_sub_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_scalar--Results] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> CdcStreamChangeCollector::UpsertManyRows >> CdcStreamChangeCollector::InsertSingleRow >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> AsyncIndexChangeCollector::InsertSingleRow >> TGRpcYdbTest::ReadTablePg [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TYardTest::TestLogLatency [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep >> TGRpcYdbTest::ExecuteQueryWithUuid ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-09-25T16:17:43.287892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061766195259573:2140];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:43.287964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056ae/r3tmp/tmpjAlEwx/pdisk_1.dat 2025-09-25T16:17:43.335157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:43.340847Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:43.341094Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7554061766195259471:2081] 1758817063287004 != 1758817063287007 TClient is connected to server localhost:63760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:43.395956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:43.396002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:43.397050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:43.398771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:43.402267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:43.480208Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [71d17fabad20] Connect to grpc://localhost:62769 2025-09-25T16:17:43.482356Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [71d17fabad20] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-09-25T16:17:43.485399Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [71d17fabad20] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:62769: Failed to connect to remote host: Connection refused 2025-09-25T16:17:43.489336Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [71d17fabad20] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-09-25T16:17:43.489626Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [71d17fabad20] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:62769: Failed to connect to remote host: Connection refused 2025-09-25T16:17:43.636986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:44.289976Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:44.489970Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [71d17fabad20] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-09-25T16:17:44.491218Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [71d17fabad20] Status 5 Not Found 2025-09-25T16:17:44.491408Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [71d17fabad20] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-09-25T16:17:44.493313Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [71d17fabad20] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep [GOOD] >> TYardTest::TestLogOverwriteRestarts >> AsyncIndexChangeCollector::UpsertToSameKey >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-empty_range--ForceBlocks] >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[produce-process_rows_sorted_multi_out--Results] [SKIPPED] >> test.py::test[ypath-empty_range--ForceBlocks] [SKIPPED] >> test.py::test[ypath-empty_range--Results] [SKIPPED] >> test.py::test[ytflow-file--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-file--Results] [SKIPPED] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056a8/r3tmp/tmpBjLyEC/pdisk_1.dat 2025-09-25T16:17:43.056965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061766537760196:2080];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:43.058884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:43.128502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:43.129506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:43.129537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:43.133522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:43.140300Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:27134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:43.180078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:43.185196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:43.233486Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [13c43fabdbe0] Connect to grpc://localhost:4861 2025-09-25T16:17:43.235697Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [13c43fabdbe0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-09-25T16:17:43.237555Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [13c43fabdbe0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4861: Failed to connect to remote host: Connection refused 2025-09-25T16:17:43.238052Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [13c43fabdbe0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-09-25T16:17:43.238225Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [13c43fabdbe0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4861: Failed to connect to remote host: Connection refused 2025-09-25T16:17:43.388300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:44.051101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:44.240974Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [13c43fabdbe0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-09-25T16:17:44.241268Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [13c43fabdbe0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4861: Failed to connect to remote host: Connection refused 2025-09-25T16:17:45.244994Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [13c43fabdbe0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-09-25T16:17:45.247282Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [13c43fabdbe0] Status 5 Not Found 2025-09-25T16:17:45.248994Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [13c43fabdbe0] Request ListFoldersRequest { id: "i_am_exists" } 2025-09-25T16:17:45.253410Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [13c43fabdbe0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> KqpPg::EquiJoin-useSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ReadTablePg [GOOD] Test command err: 2025-09-25T16:17:37.607460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061741263282119:2182];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:37.607489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041c1/r3tmp/tmp3jmbUu/pdisk_1.dat 2025-09-25T16:17:37.660815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:37.671990Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table 
profiles were not loaded TServer::EnableGrpc on GrpcPort 8291, node 1 2025-09-25T16:17:37.694105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:37.694119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:37.694121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:37.694170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:37.709931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:37.709961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:37.711956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:37.730676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:37.767741Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061741263282888:2596] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:17:37.860049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:38.852900Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061744939334399:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:38.852967Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:38.855154Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041c1/r3tmp/tmpu7Ep2D/pdisk_1.dat 2025-09-25T16:17:38.884986Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6822, node 4 2025-09-25T16:17:38.901304Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:38.901317Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:38.901320Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:38.901379Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:38.953052Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:38.953094Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:38.955041Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:38.956054Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:39.138203Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:39.264752Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061749234302563:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:39.264779Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:39.264910Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061749234302573:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:39.264918Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:39.266019Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061749234302578:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:39.267094Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:39.272473Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7554061749234302580:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:39.331856Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061749234302651:2668] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:40.104418Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061753991409363:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:40.104452Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041c1/r3tmp/tmpTt7yjL/pdisk_1.dat 2025-09-25T16:17:40.108131Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:40.132142Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10186, node 7 2025-09-25T16:17:40.152620Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:40.152638Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:40.152640Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:40.152670Z node 7 :NET_CLASSIFIER ERROR: net_cla ... SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:43.378734Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:43.385597Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:43.405704Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# CreateTableRequest, traceId# 01k60tn4fd0nf0h2s6rp2hk6js, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:43.406830Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:43.408016Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:43.408057Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:43.408061Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:43.408080Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:43.520291Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:44.196142Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:44.432082Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.432136Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.432140Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.432158Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.438574Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# CreateSessionRequest, traceId# 01k60tn5fpftepdk82970kd8e6, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.439931Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ExecuteDataQueryRequest, traceId# 01k60tn5fq9et0mp1wevcwprzq, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.440340Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061770223562226:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.440346Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061770223562218:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.440359Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.440506Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061770223562233:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.440512Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.441182Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:44.442575Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.442602Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.442603Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.442616Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.446353Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.446383Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.446386Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:17:44.446399Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:17:44.447182Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061770223562232:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:44.528851Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061770223562311:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:44.554671Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tn5fq9et0mp1wevcwprzq, Database: , SessionId: ydb://session/3?node_id=13&id=NDkzOTA3NTItY2FjMWZmMjgtYWE3MjkzZGEtZTM0YzMxYmQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:44.561254Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ReadTableRequest, traceId# 01k60tn5khdmx15vf1e6e7j34y, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.561450Z node 13 :READ_TABLE_API NOTICE: rpc_read_table.cpp:531: [13:7554061770223562355:2338] Finish grpc stream, status: 400010 2025-09-25T16:17:44.567457Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ReadTableRequest, traceId# 01k60tn5kqeahhprtrbb5wy5nw, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.572579Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:267: [13:7554061770223562356:2339] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-09-25T16:17:44.572592Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:629: [13:7554061770223562356:2339] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-09-25T16:17:44.573009Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:647: [13:7554061770223562356:2339] got stream part, size: 246, RU required: 128 rate limiter absent 2025-09-25T16:17:44.573187Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:563: [13:7554061770223562356:2339] Starting inactivity timer for 600.000000s with tag 3 2025-09-25T16:17:44.573323Z node 13 :READ_TABLE_API NOTICE: rpc_read_table.cpp:531: [13:7554061770223562356:2339] Finish grpc stream, status: 400000 2025-09-25T16:17:44.574057Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ReadTableRequest, traceId# 01k60tn5kxe3bv4a963p4nv961, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.580866Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:267: [13:7554061770223562378:2341] Adding quota request to queue ShardId: 0, TxId: 281474976715664 2025-09-25T16:17:44.580884Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:629: [13:7554061770223562378:2341] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2025-09-25T16:17:44.581160Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:647: [13:7554061770223562378:2341] got stream part, size: 84, RU required: 128 rate limiter absent 2025-09-25T16:17:44.581335Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:563: [13:7554061770223562378:2341] Starting inactivity timer for 600.000000s with tag 3 
2025-09-25T16:17:44.581456Z node 13 :READ_TABLE_API NOTICE: rpc_read_table.cpp:531: [13:7554061770223562378:2341] Finish grpc stream, status: 400000 2025-09-25T16:17:44.582351Z node 13 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ReadTableRequest, traceId# 01k60tn5m60gn3qvma1pz50ays, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43998, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:17:44.589231Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:267: [13:7554061770223562400:2343] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2025-09-25T16:17:44.589248Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:629: [13:7554061770223562400:2343] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-09-25T16:17:44.589564Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:647: [13:7554061770223562400:2343] got stream part, size: 210, RU required: 128 rate limiter absent 2025-09-25T16:17:44.589739Z node 13 :READ_TABLE_API DEBUG: rpc_read_table.cpp:563: [13:7554061770223562400:2343] Starting inactivity timer for 600.000000s with tag 3 2025-09-25T16:17:44.590013Z node 13 :READ_TABLE_API NOTICE: rpc_read_table.cpp:531: [13:7554061770223562400:2343] Finish grpc stream, status: 400000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-09-25T16:17:44.164815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061773457355708:2258];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:44.164925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005691/r3tmp/tmpO5Izlo/pdisk_1.dat 2025-09-25T16:17:44.219633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:44.232153Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:44.232387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061773457355459:2081] 1758817064161140 != 1758817064161143 2025-09-25T16:17:44.266320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:44.266357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:44.267458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:44.289927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:44.308925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:44.425413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:44.794783Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061772280111236:2139];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:44.794872Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005691/r3tmp/tmpGFZiIH/pdisk_1.dat 2025-09-25T16:17:44.798199Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:44.807934Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:44.808180Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7554061772280111135:2081] 1758817064794226 != 1758817064794229 2025-09-25T16:17:44.811175Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:44.811199Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:44.812309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65382 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:44.830630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:44.832507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> KqpPg::ExplainColumnsReorder >> AsyncIndexChangeCollector::UpsertSingleRow >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> test.py::test[insert-trivial_select-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_select-default.txt-Results] >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 32388, MsgBus: 29220 2025-09-25T16:17:35.011934Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061730947549862:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:35.011950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554d/r3tmp/tmpBqm4ta/pdisk_1.dat 2025-09-25T16:17:35.064514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:35.065862Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32388, node 1 2025-09-25T16:17:35.085023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:35.085036Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:35.085044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:35.085094Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29220 2025-09-25T16:17:35.115249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:35.115274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:35.119380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:35.139027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:35.337404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:35.389270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061730947550475:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.389304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.389404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061730947550500:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.390078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:35.391785Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061730947550502:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:35.478044Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061730947550564:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:35.489775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) Trying to start YDB, gRPC: 62635, MsgBus: 11090 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554d/r3tmp/tmp0H8sM1/pdisk_1.dat 2025-09-25T16:17:35.787033Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061733412747984:2162];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:35.787055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:35.789523Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:35.801619Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:35.801656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:35.802987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:35.805862Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62635, node 2 2025-09-25T16:17:35.811903Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:35.811914Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:35.811916Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:35.811959Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11090 TClient is connected to server localhost:11090 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:35.862423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:36.061786Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:36.227574Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061737707715803:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.227595Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061737707715783:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.227626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.227790Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061737707715820:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.227809Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.228507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cp ... kloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:44.529928Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:44.532661Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-09-25T16:17:44.532723Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554061771792736352:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:17:44.625710Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061771792736403:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:44.630118Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:44.659621Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:44.679570Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [10:7554061771792736636:2346], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-09-25T16:17:44.679679Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=10&id=MWZhNjFjN2QtNzAyY2FmODYtODBiZTIwYS05YjM4MmRiNQ==, ActorId: [10:7554061771792736634:2345], ActorState: ExecuteState, TraceId: 01k60tn5q160srv43tgd7ezgs2, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 19939, MsgBus: 10621 2025-09-25T16:17:45.045544Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7554061775583092217:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:45.047068Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:45.048299Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554d/r3tmp/tmpFzWhV0/pdisk_1.dat 2025-09-25T16:17:45.065145Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:45.067802Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.067821Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:45.069081Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19939, node 11 2025-09-25T16:17:45.076024Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:45.076037Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:45.076039Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:45.076089Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10621 2025-09-25T16:17:45.121325Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:45.142632Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:45.472748Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061775583092816:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.472781Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061775583092835:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.472792Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.473759Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:45.474055Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061775583092872:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.474087Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.474658Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061775583092875:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.474703Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.477280Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7554061775583092845:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:45.542226Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7554061775583092900:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:45.547689Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:45.575804Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:45.593415Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [11:7554061775583093133:2347], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-09-25T16:17:45.594051Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=11&id=MzFhNmRhM2QtY2RkNjUyYzQtNmUwMGFmYy1iNzhmNjc4ZQ==, ActorId: [11:7554061775583093131:2346], ActorState: ExecuteState, TraceId: 01k60tn6knbvzw7n3s6k1hxepj, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[in-in_with_list_dict-default.txt-Results] [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] |81.1%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertFromSelect_Serial+useSink >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> KqpPg::ExplainColumnsReorder [GOOD] |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[ytflow-file--Results] [SKIPPED] >> TPopulatorTestWithResets::UpdateAck >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> TPopulatorTest::RemoveDir >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::RemoveDir [GOOD] |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |81.1%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 25488, MsgBus: 31576 2025-09-25T16:17:34.983715Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061727592192992:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:34.983977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554e/r3tmp/tmpb2vfom/pdisk_1.dat 2025-09-25T16:17:35.022761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25488, node 1 2025-09-25T16:17:35.034503Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:35.042713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:35.042724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:35.042727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:35.042762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31576 2025-09-25T16:17:35.086944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:35.086985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:35.088018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:35.105530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:35.307311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:35.372641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731887160919:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.372660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731887160926:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.372666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.372712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731887160933:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.372718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.373487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:35.375500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061731887160934:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:35.470104Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061731887160986:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 32178, MsgBus: 25670 2025-09-25T16:17:35.737437Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061730954119719:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:35.738221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:35.740466Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554e/r3tmp/tmpUCB9Xq/pdisk_1.dat 2025-09-25T16:17:35.754398Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32178, node 2 2025-09-25T16:17:35.765086Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:35.765098Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:35.765100Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:35.765145Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25670 TClient is connected to server localhost:25670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:35.816260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:35.840545Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:35.840577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:35.841637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:35.980951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:36.113585Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061735249087635:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.113606Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.113637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061735249087646:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.113669Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061735249087649:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.113731Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, s ... us: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:45.625743Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061777521801020:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.625829Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.626106Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061777521801048:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.626121Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.628047Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:45.649026Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:45.661541Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061777521801195:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.661573Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.661700Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061777521801200:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.661712Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061777521801201:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.661718Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:45.662795Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:45.670844Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7554061777521801204:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:17:45.737865Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7554061777521801255:2440] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:46.725439Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:46.727470Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:46.733214Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:289:2334], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:46.733294Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:46.733306Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554e/r3tmp/tmp7X926t/pdisk_1.dat 2025-09-25T16:17:46.828202Z node 12 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:46.828806Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:46.828863Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:46.829160Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [12:34:2081] 1758817066165900 != 1758817066165904 2025-09-25T16:17:46.865533Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:46.917727Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:46.965606Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:47.056765Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:651:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:47.056799Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:661:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:47.056811Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:47.057012Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:666:2555], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:47.057038Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:47.057985Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:47.183296Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:665:2554], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-09-25T16:17:47.209069Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:47.246134Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:737:2595] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } PreparedQuery: "a8a928b7-967e824d-ce4e115d-f58bfd5d" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"a3d18d33-bd1c450f-99af8634-f7fcdaba\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 63369, MsgBus: 27329 2025-09-25T16:17:37.104977Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061740486587863:2165];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:37.105094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:37.113187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0028ee/r3tmp/tmpGe9gBR/pdisk_1.dat 2025-09-25T16:17:37.145391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:37.145420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:37.146742Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63369, node 1 2025-09-25T16:17:37.168067Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:37.169005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061740486587726:2081] 1758817057099650 != 1758817057099653 2025-09-25T16:17:37.170439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:37.170448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:37.170450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:37.170527Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27329 TClient is connected to server localhost:27329 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:37.255242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:37.267083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:37.526171Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-09-25T16:17:37.527024Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061740486588359:2297][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:37.573309Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/endf/0028ee/r3tmp/spilling-tmp-runner/node_1_7df3618c-a33e7e34-eee574ef-7c2f8599, actor: [1:7554061740486588367:2302] 2025-09-25T16:17:37.573447Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/endf/0028ee/r3tmp/spilling-tmp-runner E0925 16:17:37.573827743 180685 dns_resolver_ares.cc:452] no server name supplied in dns URI E0925 16:17:37.573908661 180685 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-09-25T16:17:37.573995Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061740486588381:2299][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:37.574023Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061740486588382:2300][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0925 16:17:37.574756700 180685 dns_resolver_ares.cc:452] no server name supplied in dns URI E0925 16:17:37.574793736 180685 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-09-25T16:17:37.575272Z node 1 :STREAMS_STORAGE_SERVICE ERROR: storage_proxy.cpp:183: Failed to init checkpoint storage: {
: Error: GRpc error: (2): Failed to create secure client channel } E0925 16:17:37.575437713 180685 dns_resolver_ares.cc:452] no server name supplied in dns URI E0925 16:17:37.575464276 180685 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-09-25T16:17:37.575752Z node 1 :STREAMS_STORAGE_SERVICE ERROR: storage_proxy.cpp:189: Failed to init checkpoint state storage: {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-09-25T16:17:37.576050Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k60tmygv6a5h4jfhr4wh72xx", Request has 18444985256651.975573s seconds to be completed 2025-09-25T16:17:37.576814Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60tmygv6a5h4jfhr4wh72xx", Created new session, sessionId: ydb://session/3?node_id=1&id=YWUxZjg4NjctYzcwYjA0ZGItODBhYTkwZDgtMzQ0MDcyZmE=, workerId: [1:7554061740486588397:2316], database: /Root, longSession: 1, local sessions count: 1 2025-09-25T16:17:37.576892Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k60tmygv6a5h4jfhr4wh72xx 2025-09-25T16:17:37.576918Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:17:37.576922Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:17:37.576930Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-09-25T16:17:37.585620Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60tmysh1sw9ny608mcn7rfv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWUxZjg4NjctYzcwYjA0ZGItODBhYTkwZDgtMzQ0MDcyZmE=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7554061740486588397:2316] 2025-09-25T16:17:37.585643Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7554061740486588399:2302] 2025-09-25T16:17:37.589154Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061740486588402:2303][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:37.593108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061740486588400:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:37.593161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:37.597009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061740486588410:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:37.597048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:38.103330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:38.105713Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061744781555710:2307][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:38.106417Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061744781555721:2310][/Root/.metadata] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:38.106430Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7554061744781555722:2311][/Root/.metadata/initialization] Set up state: owner# [1:7554061740486587996:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-09-25T16:17:38.106784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-09-25T16:17:38.106838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:17:38.106841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/initialization, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2025-09-25T16:17:38.106851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 720 ... ITH (\n SOURCE_TYPE=\"Iceberg\",\n DATABASE_NAME=\"pgdb\",\n WAREHOUSE_TYPE=\"s3\",\n WAREHOUSE_S3_REGION=\"s3_region\",\n WAREHOUSE_S3_ENDPOINT=\"s3_endpoint\",\n WAREHOUSE_S3_URI=\"s3_uri\",\n \n AUTH_METHOD=\"TOKEN\",\n TOKEN_SECRET_NAME=\"external_data_source_p\"\n ,\n \n CATALOG_TYPE=\"hadoop\"\n ,\n USE_TLS=\"FALSE\"\n );\n ", parameters: 0b 2025-09-25T16:17:47.058352Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60tn6d518rgkrqy8ree5pce", Forwarded response to sender actor, requestId: 3, sender: [4:7554061776016732310:2317], selfId: [4:7554061771721764522:2205], source: [4:7554061776016732293:2315] 2025-09-25T16:17:47.061210Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=4&id=MTE4YzdkMmMtZjAwM2UzY2MtYmQxYzg0MzUtOWE3ZGI0ZTY=, workerId: [4:7554061776016732293:2315], local sessions count: 0 2025-09-25T16:17:47.062296Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60tn81n31pfxgg0asffcyqb", Created new session, sessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, workerId: [4:7554061784606668128:2611], database: /Root, longSession: 0, local sessions count: 1 2025-09-25T16:17:47.062366Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60tn81n31pfxgg0asffcyqb, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 7200.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 36, targetId: [4:7554061784606668128:2611] 2025-09-25T16:17:47.062373Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 36 timeout: 7200.000000s actor id: [4:7554061784606668129:2901] 2025-09-25T16:17:47.067358Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:7554061784606668133:2903][/Root/external_data_source] Set up state: owner# [4:7554061771721764613:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 13], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } Call DescribeTable. data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 2025-09-25T16:17:47.093849Z node 4 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [4:7554061784606668146:2611] TxId: 281474976710693. Ctx: { TraceId: 01k60tn81n31pfxgg0asffcyqb, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, PoolId: default}. 
Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-09-25T16:17:47.095043Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710693. Ctx: { TraceId: 01k60tn81n31pfxgg0asffcyqb, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, PoolId: default}. Compute actor has finished execution: [4:7554061784606668149:2615] 2025-09-25T16:17:47.095164Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710693. Ctx: { TraceId: 01k60tn81n31pfxgg0asffcyqb, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, PoolId: default}. 
Compute actor has finished execution: [4:7554061784606668150:2616] 2025-09-25T16:17:47.095304Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60tn81n31pfxgg0asffcyqb", Forwarded response to sender actor, requestId: 36, sender: [4:7554061784606668127:2610], selfId: [4:7554061771721764522:2205], source: [4:7554061784606668128:2611] 2025-09-25T16:17:47.095433Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=4&id=ZTI5ZjlhNzgtNDJjZDg1YWUtYjI2YzNkZjAtNDgwOGFlODA=, workerId: [4:7554061784606668128:2611], local sessions count: 0 >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TPopulatorQuorumTest::OneRingGroup >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> test.py::test[produce-process_with_python-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_expr-default.txt-Results] >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-09-25T16:17:48.084094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:17:48.084127Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 100 2025-09-25T16:17:48.111855Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 419, preserialized size# 51 2025-09-25T16:17:48.111901Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 
2025-09-25T16:17:48.112200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112214Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112223Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 309, preserialized size# 2 2025-09-25T16:17:48.112347Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-09-25T16:17:48.112362Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112373Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:48.112384Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:48.112540Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 
Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:48.112549Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:48.112556Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.112651Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:48.112657Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-09-25T16:17:48.112666Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:48.112673Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:48.112679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:48.112739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:48.112773Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:48.113392Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:48.113413Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-09-25T16:17:48.113457Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:48.113462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for 
unknown update (already acked?): sender# [1:100:2128], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-09-25T16:17:48.114183Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 429, preserialized size# 56 2025-09-25T16:17:48.114202Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-09-25T16:17:48.114235Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], co ... 
2057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116131Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116140Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:99:2127], cookie# 101 2025-09-25T16:17:48.116146Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-09-25T16:17:48.116193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:100:2128], cookie# 101 2025-09-25T16:17:48.116200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-09-25T16:17:48.116213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-09-25T16:17:48.116222Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-09-25T16:17:48.116231Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2126], cookie# 101 2025-09-25T16:17:48.116284Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2127], cookie# 101 2025-09-25T16:17:48.116290Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-09-25T16:17:48.116360Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:100:2128], cookie# 101 2025-09-25T16:17:48.116366Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:17:48.116612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" 
PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 101, event size# 321, preserialized size# 2 2025-09-25T16:17:48.116620Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-09-25T16:17:48.116637Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116647Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116653Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 101, event size# 384, preserialized size# 0 2025-09-25T16:17:48.116692Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: 
[1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-09-25T16:17:48.116702Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116708Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 2025-09-25T16:17:48.116729Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-09-25T16:17:48.116735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-09-25T16:17:48.116742Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 101 2025-09-25T16:17:48.116781Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:100:2128], cookie# 101 2025-09-25T16:17:48.116796Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2126], cookie# 101 2025-09-25T16:17:48.116801Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-09-25T16:17:48.116809Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-09-25T16:17:48.116817Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-09-25T16:17:48.116848Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-09-25T16:17:48.116889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2127], cookie# 101 
2025-09-25T16:17:48.116909Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2126], cookie# 101 2025-09-25T16:17:48.116922Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:99:2127], cookie# 101 2025-09-25T16:17:48.116928Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-09-25T16:17:48.116939Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:100:2128], cookie# 101 2025-09-25T16:17:48.116943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-09-25T16:17:48.121416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:17:48.121445Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 100 2025-09-25T16:17:48.150903Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# 
[1:72:2112], cookie# 100, event size# 419, preserialized size# 51 2025-09-25T16:17:48.150954Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-09-25T16:17:48.151281Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.151298Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.151307Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.151393Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 309, preserialized size# 2 2025-09-25T16:17:48.151403Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-09-25T16:17:48.151424Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.151430Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:48.151437Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 
100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-09-25T16:17:48.152341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 429, preserialized size# 56 2025-09-25T16:17:48.152357Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-09-25T16:17:48.152436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 
MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 314, preserialized size# 2 2025-09-25T16:17:48.152442Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-09-25T16:17:48.189025Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:99:2127] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-09-25T16:17:48.189053Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:99:2127] Successful handshake: replica# [1:15:2062] 2025-09-25T16:17:48.189061Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:264: [1:99:2127] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:17:48.189076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:100:2128] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-09-25T16:17:48.189081Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:100:2128] Successful handshake: replica# [1:18:2065] 2025-09-25T16:17:48.189085Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:264: [1:100:2128] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:17:48.189103Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:99:2127] 2025-09-25T16:17:48.189118Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:98:2126] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-09-25T16:17:48.189122Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:98:2126] Successful handshake: replica# [1:12:2059] 2025-09-25T16:17:48.189127Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:264: [1:98:2126] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:17:48.189141Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:97:2125] Handle NKikimr::NS ... 
DEBUG: populator.cpp:275: [1:100:2128] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:97:2125] 2025-09-25T16:17:48.189231Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:100:2128] 2025-09-25T16:17:48.189240Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-09-25T16:17:48.189251Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 0 2025-09-25T16:17:48.189261Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:100:2128] 2025-09-25T16:17:48.189267Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-09-25T16:17:48.189274Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-09-25T16:17:48.189284Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:98:2126] 2025-09-25T16:17:48.189292Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:97:2125] 2025-09-25T16:17:48.189298Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 0 2025-09-25T16:17:48.189312Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2126] 2025-09-25T16:17:48.189317Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 0 2025-09-25T16:17:48.189324Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-09-25T16:17:48.189332Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2126] 
2025-09-25T16:17:48.189338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 0 2025-09-25T16:17:48.189351Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:99:2127] 2025-09-25T16:17:48.189357Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-09-25T16:17:48.189364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2125] 2025-09-25T16:17:48.189372Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:100:2128] 2025-09-25T16:17:48.189380Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-09-25T16:17:48.189388Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2128] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2125] 2025-09-25T16:17:48.189401Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2127], cookie# 0 2025-09-25T16:17:48.189406Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 0 2025-09-25T16:17:48.189414Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-09-25T16:17:48.189420Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:100:2128] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-09-25T16:17:48.189427Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:48.189436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2127], cookie# 0 2025-09-25T16:17:48.189440Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 0 2025-09-25T16:17:48.189445Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 
72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:48.189453Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:100:2128], cookie# 0 2025-09-25T16:17:48.189457Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 0 2025-09-25T16:17:48.189462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:48.189467Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-09-25T16:17:48.189473Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-09-25T16:17:48.189611Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:98:2126] 2025-09-25T16:17:48.189621Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2125] 2025-09-25T16:17:48.189674Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:100:2128], cookie# 0 2025-09-25T16:17:48.189679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 0 2025-09-25T16:17:48.189685Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-09-25T16:17:48.189693Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:48.189699Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-09-25T16:17:48.189704Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-09-25T16:17:48.189731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2126], cookie# 0 2025-09-25T16:17:48.189735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 0 2025-09-25T16:17:48.189814Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:48.189820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 100 2025-09-25T16:17:48.189827Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2126], cookie# 0 2025-09-25T16:17:48.189831Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 0 2025-09-25T16:17:48.189885Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:48.189891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 100 TestWaitNotification: OK eventTxId 100 >> TPopulatorQuorumTest::OneRingGroup [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> DataShardWrite::AsyncIndexKeySizeConstraint ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-09-25T16:17:48.568872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-09-25T16:17:48.570107Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2025-09-25T16:17:48.570128Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-09-25T16:17:48.570463Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-09-25T16:17:48.570471Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2025-09-25T16:17:48.570476Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2025-09-25T16:17:48.570485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-09-25T16:17:48.570489Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2025-09-25T16:17:48.570494Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2025-09-25T16:17:48.570501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-09-25T16:17:48.570505Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2025-09-25T16:17:48.570509Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2025-09-25T16:17:48.570523Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2025-09-25T16:17:48.570535Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2025-09-25T16:17:48.570566Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2025-09-25T16:17:48.570598Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-09-25T16:17:48.570617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2025-09-25T16:17:48.570627Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 2025-09-25T16:17:48.570639Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-09-25T16:17:48.570651Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 
2025-09-25T16:17:48.570663Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2025-09-25T16:17:48.570670Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-09-25T16:17:48.570682Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2025-09-25T16:17:48.570693Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-09-25T16:17:48.570702Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2025-09-25T16:17:48.570709Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-09-25T16:17:48.570720Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2025-09-25T16:17:48.570729Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-09-25T16:17:48.570740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2025-09-25T16:17:48.570748Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-09-25T16:17:48.570757Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2025-09-25T16:17:48.570763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:48.570792Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2025-09-25T16:17:48.570801Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-09-25T16:17:48.570811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2025-09-25T16:17:48.570817Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2025-09-25T16:17:48.570824Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-09-25T16:17:48.570833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2025-09-25T16:17:48.570837Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 2025-09-25T16:17:48.570843Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:48.570856Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2025-09-25T16:17:48.570860Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2025-09-25T16:17:48.570915Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2025-09-25T16:17:48.581830Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 12345 2025-09-25T16:17:48.581862Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> test.py::test[window-win_func_first_last_rev--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> TGRpcYdbTest::ExplainQuery >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> DataShardWrite::IncrementImmediate >> DataShardWrite::UpsertImmediateManyColumns >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_avg--ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[blocks-combine_all_avg--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-09-25T16:17:49.060665Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-09-25T16:17:49.061906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-09-25T16:17:49.061933Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-09-25T16:17:49.062304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2025-09-25T16:17:49.062313Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2025-09-25T16:17:49.062318Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2025-09-25T16:17:49.062329Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2025-09-25T16:17:49.062333Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2025-09-25T16:17:49.062337Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2025-09-25T16:17:49.062344Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2025-09-25T16:17:49.062347Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2025-09-25T16:17:49.062351Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2025-09-25T16:17:49.062367Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-09-25T16:17:49.062377Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-09-25T16:17:49.062381Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-09-25T16:17:49.062384Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-09-25T16:17:49.062390Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-09-25T16:17:49.062394Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-09-25T16:17:49.062398Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-09-25T16:17:49.062404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-09-25T16:17:49.062410Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-09-25T16:17:49.062413Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-09-25T16:17:49.062423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2025-09-25T16:17:49.062434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-09-25T16:17:49.062487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2025-09-25T16:17:49.062497Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2025-09-25T16:17:49.062506Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-09-25T16:17:49.062524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062531Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2025-09-25T16:17:49.062568Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2025-09-25T16:17:49.062574Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062583Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:49.062591Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2025-09-25T16:17:49.062609Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-09-25T16:17:49.062615Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062624Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:49.062631Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-09-25T16:17:49.062639Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2025-09-25T16:17:49.062655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:49.062671Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-09-25T16:17:49.062679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:49.062688Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-09-25T16:17:49.062694Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:49.062702Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-09-25T16:17:49.062726Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-09-25T16:17:49.062735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062746Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2025-09-25T16:17:49.062751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2025-09-25T16:17:49.062758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:49.062788Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-09-25T16:17:49.062794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062802Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-09-25T16:17:49.062807Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2025-09-25T16:17:49.062812Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2025-09-25T16:17:49.062823Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2025-09-25T16:17:49.062828Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:49.062840Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:49.062846Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2025-09-25T16:17:49.062859Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2025-09-25T16:17:49.062863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:49.062875Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-09-25T16:17:49.062879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:49.062890Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:49.062896Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062903Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-09-25T16:17:49.062906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-09-25T16:17:49.062911Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:49.062923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:49.062929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:49.062936Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-09-25T16:17:49.062941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-09-25T16:17:49.062945Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2025-09-25T16:17:49.063020Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-09-25T16:17:49.073203Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-09-25T16:17:49.073242Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> DataShardWrite::UpsertWithDefaults >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> test.py::test[join-bush_in_in_in--ForceBlocks] >> TPopulatorQuorumTest::TwoRingGroups >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> TPopulatorQuorumTest::TwoRingGroups [GOOD] >> test.py::test[join-join_without_column--ForceBlocks] [GOOD] >> test.py::test[join-join_without_column--Results] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage |81.1%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[insert-trivial_select-default.txt-Results] [GOOD] >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-09-25T16:17:50.440352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-09-25T16:17:50.441505Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-09-25T16:17:50.441530Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-09-25T16:17:50.441872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2025-09-25T16:17:50.441879Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2025-09-25T16:17:50.441884Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2025-09-25T16:17:50.441894Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2025-09-25T16:17:50.441898Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2025-09-25T16:17:50.441901Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2025-09-25T16:17:50.441908Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2025-09-25T16:17:50.441912Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2025-09-25T16:17:50.441915Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2025-09-25T16:17:50.441931Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-09-25T16:17:50.441940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-09-25T16:17:50.441943Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-09-25T16:17:50.441947Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-09-25T16:17:50.441953Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-09-25T16:17:50.441956Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-09-25T16:17:50.441960Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-09-25T16:17:50.441965Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-09-25T16:17:50.441969Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-09-25T16:17:50.441972Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-09-25T16:17:50.441981Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2025-09-25T16:17:50.441992Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-09-25T16:17:50.442050Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2025-09-25T16:17:50.442059Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2025-09-25T16:17:50.442068Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-09-25T16:17:50.442083Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442090Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442101Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2025-09-25T16:17:50.442108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2025-09-25T16:17:50.442114Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442122Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:50.442129Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442138Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2025-09-25T16:17:50.442144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-09-25T16:17:50.442152Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442160Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:50.442167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-09-25T16:17:50.442174Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442183Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2025-09-25T16:17:50.442192Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442204Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:50.442213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-09-25T16:17:50.442228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:50.442238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-09-25T16:17:50.442243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:50.442252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-09-25T16:17:50.442278Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-09-25T16:17:50.442285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2025-09-25T16:17:50.442301Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2025-09-25T16:17:50.442308Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:50.442336Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-09-25T16:17:50.442344Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442352Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-09-25T16:17:50.442358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2025-09-25T16:17:50.442364Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442373Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2025-09-25T16:17:50.442377Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2025-09-25T16:17:50.442382Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:50.442394Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:50.442400Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2025-09-25T16:17:50.442411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2025-09-25T16:17:50.442416Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:50.442429Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-09-25T16:17:50.442432Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:50.442444Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:50.442450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442457Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-09-25T16:17:50.442461Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-09-25T16:17:50.442465Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:50.442478Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:50.442484Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:50.442490Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-09-25T16:17:50.442496Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-09-25T16:17:50.442499Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2025-09-25T16:17:50.442592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2025-09-25T16:17:50.442599Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2025-09-25T16:17:50.442606Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-09-25T16:17:50.452794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-09-25T16:17:50.452855Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> TGRpcYdbTest::ExplainQuery [GOOD] >> DataShardWrite::IncrementImmediate [GOOD] >> 
DataShardWrite::UpsertImmediate >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-09-25T16:17:45.636593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:45.744019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:45.746580Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:45.746649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:45.746667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003717/r3tmp/tmpgK9kUc/pdisk_1.dat 2025-09-25T16:17:45.811990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.812033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:45.822992Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:45.823898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817065154001 != 1758817065154005 2025-09-25T16:17:45.856690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:45.907114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:45.940613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:46.025954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:46.042957Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:17:46.043029Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.053581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.053650Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.053821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:46.053830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:46.053838Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:46.053896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.053989Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2575] 2025-09-25T16:17:46.054027Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.055384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.055408Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:711:2572] in generation 1 2025-09-25T16:17:46.055580Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.055598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.055743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:17:46.055752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:17:46.055759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:17:46.055805Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.055822Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.055831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2575] in generation 1 2025-09-25T16:17:46.066181Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.071761Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:46.071854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.071881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:17:46.071887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.071893Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:46.071900Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.072018Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.072027Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:17:46.072037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.072045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:17:46.072048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:17:46.072052Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:17:46.072056Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-09-25T16:17:46.072153Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:46.072176Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:46.072212Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.072219Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.072228Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:46.072234Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.072241Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:17:46.072254Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-09-25T16:17:46.072357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:685:2573], sessionId# [0:0:0] 2025-09-25T16:17:46.072364Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:17:46.072369Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.072373Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-09-25T16:17:46.072379Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:17:46.072403Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:46.072458Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:46.072479Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:46.072558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [1:677:2569], serverId# [1:689:2576], sessionId# [0:0:0] 2025-09-25T16:17:46.072590Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:17:46.072614Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:17:46.072624Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-09-25T16:17:46.072991Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.073008Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:46.083351Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:46.083408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.083584Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:17:46.083595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.225479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, cl ... ve planned 0 immediate 0 planned 0 2025-09-25T16:17:50.564399Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:50.564411Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:50.564420Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:50.564476Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-09-25T16:17:50.564503Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:50.564528Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-09-25T16:17:50.564533Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:50.564861Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:50.564883Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-09-25T16:17:50.564976Z node 4 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:17:50.565056Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:50.565484Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-09-25T16:17:50.565501Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:17:50.565973Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:17:50.565988Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:17:50.565995Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037889 2025-09-25T16:17:50.566014Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 
72075186224037889 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:50.566026Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:50.566040Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:50.566195Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:17:50.566206Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:50.566304Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:17:50.566311Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:50.566600Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:50.566611Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:50.566616Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:17:50.566628Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:50.566637Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:50.566646Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:50.566830Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:50.566848Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:50.568793Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-09-25T16:17:50.568815Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:17:50.568915Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:50.569111Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:50.569178Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:50.569186Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:50.571313Z node 4 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:786:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.571338Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:795:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.571350Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.571525Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:801:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.571550Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.572463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:50.575343Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:50.575380Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:50.618597Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:50.721423Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:50.721486Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:50.722240Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:800:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:50.758244Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:873:2696] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:50.801541Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tnbfa7akw527qweshrjrn, Database: , SessionId: ydb://session/3?node_id=4&id=MTYzNjQyYjgtYjViOTg1ZDQtMTNiZTMwYmEtNDE0NzZkZWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:50.802394Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:948:2732], serverId# [4:949:2733], sessionId# [0:0:0] 2025-09-25T16:17:50.802548Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-09-25T16:17:50.802651Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817070802616 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:50.802690Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-09-25T16:17:50.817153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:17:50.817181Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:50.818582Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:955:2738], serverId# [4:956:2739], sessionId# [0:0:0] 2025-09-25T16:17:50.819788Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:957:2740], serverId# [4:958:2741], sessionId# [0:0:0] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-09-25T16:17:44.397193Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061773528572506:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:44.397217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b8/r3tmp/tmpXE8Uok/pdisk_1.dat 2025-09-25T16:17:44.464886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:44.498418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:44.498450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:44.499677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:44.504604Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1814, node 1 2025-09-25T16:17:44.540356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:44.540368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:44.540369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:44.540403Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-09-25T16:17:44.578578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:44.647491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:45.571285Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061777659875344:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:45.571314Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:45.614736Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b8/r3tmp/tmpavU029/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23721, node 4 2025-09-25T16:17:45.651162Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:45.657012Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:45.657027Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:45.657029Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:45.657097Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:45.673413Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.673453Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6452 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:17:45.677620Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:45.679500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:45.777347Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:46.069863Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061781954843523:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:46.069901Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061781954843515:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:46.069911Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:46.070744Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:46.071268Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061781954843530:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:46.071295Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:46.075219Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7554061781954843529:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:46.149284Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061781954843604:2666] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b8/r3tmp/tmp8H2CT1/pdisk_1.dat 2025-09-25T16:17:47.056256Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:47.056329Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:47.093286Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13754, node 7 2025-09-25T16:17:47.132794Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:47.132810Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:47.132813Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:47.132886Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:47.149000Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:47.149036Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:47.159581Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72 ... VolatileState: Connecting -> Connected TClient is connected to server localhost:6416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:48.513259Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:48.642275Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:48.816672Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061788588175661:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:48.816694Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:48.816878Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061788588175673:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:48.816889Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061788588175674:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:48.816954Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:48.817781Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:48.828914Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554061788588175677:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:48.929026Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061788588175746:2656] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:49.965861Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061793512061851:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:49.966006Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b8/r3tmp/tmpmunOdM/pdisk_1.dat 2025-09-25T16:17:49.993750Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:50.023437Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25867, node 13 2025-09-25T16:17:50.059566Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:50.059585Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:50.059587Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:50.059662Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:50.068568Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:50.068611Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:50.071147Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:50.195501Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:50.294257Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:50.334734Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:50.452228Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061797807030230:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.452257Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.452414Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061797807030242:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.452423Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061797807030243:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.452498Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:50.453311Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:50.476689Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061797807030246:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-09-25T16:17:50.570947Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061797807030314:2775] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:50.587576Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710661. Ctx: { TraceId: 01k60tnbbk0ejkf4yffrrd4bvz, Database: , SessionId: ydb://session/3?node_id=13&id=NWI2NGJmNTQtZDA3OWFhMDEtZjRjOTY2NjItNjVjMzZkYzY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-09-25T16:17:51.910733Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-09-25T16:17:51.911895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-09-25T16:17:51.911914Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-09-25T16:17:51.912220Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-09-25T16:17:51.912228Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-09-25T16:17:51.912233Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-09-25T16:17:51.912243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-09-25T16:17:51.912248Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-09-25T16:17:51.912252Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-09-25T16:17:51.912259Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-09-25T16:17:51.912263Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-09-25T16:17:51.912267Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-09-25T16:17:51.912280Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:51.912291Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-09-25T16:17:51.912331Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:51.912363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:51.912376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:51.912385Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2025-09-25T16:17:51.912395Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-09-25T16:17:51.912406Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 
2025-09-25T16:17:51.912416Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:51.912423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:51.912431Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-09-25T16:17:51.912443Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-09-25T16:17:51.912452Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-09-25T16:17:51.912459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-09-25T16:17:51.912469Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-09-25T16:17:51.912476Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:51.912483Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-09-25T16:17:51.912495Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-09-25T16:17:51.912500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 2025-09-25T16:17:51.912507Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:51.912536Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-09-25T16:17:51.912545Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:51.912553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-09-25T16:17:51.912560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2025-09-25T16:17:51.912566Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-09-25T16:17:51.912575Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-09-25T16:17:51.912579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-09-25T16:17:51.912584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-09-25T16:17:51.912597Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-09-25T16:17:51.912601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2025-09-25T16:17:51.912655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-09-25T16:17:51.924938Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-09-25T16:17:51.924971Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::ReplaceImmediate >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-09-25T16:17:45.732360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:45.792705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:45.795788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:45.795895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:45.795925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00370c/r3tmp/tmpn4UsrX/pdisk_1.dat 2025-09-25T16:17:45.869905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.869948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:45.897166Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:45.898306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817065207914 != 1758817065207918 2025-09-25T16:17:45.933479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:45.983991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:46.030576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:46.105979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:46.121146Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:46.121251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.130998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.131044Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.131173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:46.131179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:46.131185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:46.131232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.131254Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.131266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:46.141655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.147050Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:46.147150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.147182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:46.147189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.147195Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:46.147201Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.147401Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:46.147431Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:46.147448Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.147455Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.147465Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:46.147471Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.147483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:46.147522Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:46.147582Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:46.147610Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:46.148052Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.158395Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:46.158458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.300698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:705:2583], serverId# [1:707:2585], sessionId# [0:0:0] 
2025-09-25T16:17:46.302027Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:46.302058Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.302350Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.302364Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:46.302377Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:46.302451Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-09-25T16:17:46.302490Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:46.302738Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.302756Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-09-25T16:17:46.303185Z node 1 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:17:46.303280Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.303595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:17:46.303604Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.303827Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:17:46.303841Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.304075Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.304086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.304093Z node 1 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:17:46.304111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2405], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:46.304121Z node 1 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:46.304133Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.305243Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.305798Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:46.305814Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:46.305856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 ... unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-09-25T16:17:51.312918Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-09-25T16:17:51.313132Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.334742Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:51.397147Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:51.501485Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:51.501521Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.501812Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.501823Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:51.501834Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:51.501893Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-09-25T16:17:51.501928Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:51.501977Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.502165Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:51.541134Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-09-25T16:17:51.541173Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-09-25T16:17:51.541181Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:51.541195Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.541222Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:51.541238Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-09-25T16:17:51.541255Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.541845Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-09-25T16:17:51.541860Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:51.543596Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:880:2709], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.543620Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.543630Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.543798Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:895:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.543823Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.544703Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:51.545982Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.713277Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.714019Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:894:2717], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:51.738142Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:952:2756] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:51.750811Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tncdq0f9v4k8t0gj23d4f, Database: , SessionId: ydb://session/3?node_id=4&id=YTE1MzRlNS03NzIxYmJjMy02YThjNjYyNS1lN2E1OTBlYg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:51.751598Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:983:2773], serverId# [4:984:2774], sessionId# [0:0:0] 2025-09-25T16:17:51.751766Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-09-25T16:17:51.751870Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817071751825 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:51.751911Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-09-25T16:17:51.765131Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:51.765162Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.782917Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tncmp3j6n3dnqx713fxv9, Database: , SessionId: ydb://session/3?node_id=4&id=M2UzOGYxZDgtNTYzZDMzZWItZmFhNzgwNGMtY2Q3MTM5YTA=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:51.783696Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-09-25T16:17:51.783809Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817071783772 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:51.783846Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-09-25T16:17:51.797209Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:51.797242Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.820509Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k60tncnpc6b9bppx8qgecnab, Database: , SessionId: ydb://session/3?node_id=4&id=NzI0NjYwMGEtMzk1ZDIwZGMtM2ViMDg0M2UtZmM0MWRkODI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:51.821387Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:5] at 72075186224037888 2025-09-25T16:17:51.821504Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 3 Group: 1758817071821464 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:51.821543Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:5] at 72075186224037888, row count=1 2025-09-25T16:17:51.832226Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:51.832260Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.833074Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1030:2803], serverId# [4:1031:2804], sessionId# [0:0:0] 2025-09-25T16:17:51.834334Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2805], serverId# [4:1033:2806], sessionId# [0:0:0] >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:17:36.254294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:17:36.254326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:36.254333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:17:36.254340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:17:36.254348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:17:36.254353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:17:36.254364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:36.254380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:17:36.254537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:17:36.254615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:17:36.273894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:17:36.273931Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:36.274018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:17:36.277858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:17:36.277948Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:17:36.277985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:17:36.279752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:17:36.279839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:17:36.279957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:36.280196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:17:36.281390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:36.281452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:17:36.281740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:17:36.281751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:36.281773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:17:36.281782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:17:36.281788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:17:36.281833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:17:36.286363Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:17:36.310862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:17:36.310979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:36.311056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:17:36.311066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:17:36.311136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:17:36.311153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:36.312089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:36.312161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:17:36.312222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:36.312233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:17:36.312240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:17:36.312262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:17:36.312777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:36.312790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:17:36.312796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:17:36.314356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:36.314371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:36.314380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:17:36.314389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:17:36.315123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:17:36.315615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:17:36.315674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:17:36.315921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:36.315947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... CHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:17:51.789446Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1006:0 128 -> 240 2025-09-25T16:17:51.789475Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:17:51.789489Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:17:51.789496Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:17:51.789612Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.789888Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790206Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:17:51.790216Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:17:51.790257Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:17:51.790278Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:17:51.790302Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:51.790308Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-09-25T16:17:51.790314Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-09-25T16:17:51.790321Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1006, path id: 3 FAKE_COORDINATOR: Erasing txId 1006 2025-09-25T16:17:51.790391Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-09-25T16:17:51.790399Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-09-25T16:17:51.790414Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1006:0 progress is 1/1 2025-09-25T16:17:51.790420Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.790426Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1006:0 progress is 1/1 2025-09-25T16:17:51.790430Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.790435Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-09-25T16:17:51.790442Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.790447Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1006:0 2025-09-25T16:17:51.790451Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1006:0 2025-09-25T16:17:51.790464Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:17:51.790469Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:17:51.790475Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-09-25T16:17:51.790480Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-09-25T16:17:51.790484Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:17:51.790489Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 
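The publication records above show the schemeshard keeping transaction 1006 open until every announced path version is acknowledged: it registers 3 publications, counts down on each TEvUpdateAck, and only then reports the publication complete. A minimal, self-contained C++ sketch of that ack-counting pattern (hypothetical types, not YDB's actual classes) could look like:

#include <cstdint>
#include <iostream>
#include <map>

struct TxPublications {
    int inFlight = 0;                         // publications not yet acknowledged
};

int main() {
    std::map<uint64_t, TxPublications> txs;   // txId -> pending publication count
    txs[1006].inFlight = 3;                   // "Publication still in progress, ... publications: 3"

    for (int ack = 0; ack < 3; ++ack) {       // one TEvUpdateAck per published path version
        auto& tx = txs[1006];
        std::cout << "Publication in-flight, count: " << tx.inFlight << ", txId: 1006\n";
        if (--tx.inFlight == 0) {
            std::cout << "Publication complete, notify & remove, txId: 1006\n";
            txs.erase(1006);
        }
    }
}

Run against the counts in the trace (3, 2, 1), this reproduces the same in-flight/complete sequence the schemeshard logs for txId 1006.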
2025-09-25T16:17:51.790603Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790616Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790622Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.790627Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-09-25T16:17:51.790634Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:17:51.790713Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:17:51.790720Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:17:51.790733Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-09-25T16:17:51.790815Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790826Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790831Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.790836Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-09-25T16:17:51.790841Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:17:51.790923Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790936Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 
PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.790940Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.790945Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:17:51.790950Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:17:51.790961Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-09-25T16:17:51.797958Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.797999Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:17:51.798051Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.798290Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-09-25T16:17:51.798366Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-09-25T16:17:51.798376Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-09-25T16:17:51.798464Z node 49 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-09-25T16:17:51.798492Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-09-25T16:17:51.798498Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [49:450:2440] TestWaitNotification: OK eventTxId 1006 >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> KqpScan::IsNull >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--ForceBlocks] >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> KqpScan::Grep >> test.py::test[blocks-date_sub_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--ForceBlocks] >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-09-25T16:17:52.573467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:17:52.573496Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait 
txId: 100 2025-09-25T16:17:52.599834Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 419, preserialized size# 51 2025-09-25T16:17:52.599886Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-09-25T16:17:52.600213Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600236Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600246Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600360Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 309, preserialized size# 2 2025-09-25T16:17:52.600372Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-09-25T16:17:52.600394Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:52.600413Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:52.600569Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:52.600581Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:52.600589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600596Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.600686Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:52.600694Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-09-25T16:17:52.600704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 
2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:52.600712Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:52.600719Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:52.600781Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:52.600819Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:52.600967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:52.600975Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-09-25T16:17:52.601001Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:52.601006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-09-25T16:17:52.601619Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 429, preserialized size# 56 2025-09-25T16:17:52.601637Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-09-25T16:17:52.601671Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.601680Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.601687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 FAKE_COORDINATOR: Erasing txId 100 2025-09-25T16:17:52.601833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2125] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:72:2112], cookie# 100, event size# 314, preserialized size# 2 2025-09-25T16:17:52.601843Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2125] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 
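In the populator trace above, the main populator fans each update out to three replica populators and acks back to the schemeshard once two of them have confirmed a given (pathId, version); the third, late ack then lands on an already-acked update and is dropped with "Ack for unknown update (already acked?)". A small sketch of that pattern, assuming a simple majority-of-replicas rule (an assumption made here for illustration, not something the log states), might be:

#include <cstddef>
#include <iostream>
#include <set>

int main() {
    const std::size_t replicas = 3;
    const std::size_t majority = replicas / 2 + 1;

    std::set<int> acked;                 // replica ids that confirmed this (pathId, version)
    bool ackedUpstream = false;

    for (int replica : {99, 100, 98}) {  // order in which replica acks happen to arrive
        if (ackedUpstream) {
            std::cout << "Ack for unknown update (already acked?): replica " << replica << "\n";
            continue;
        }
        acked.insert(replica);
        if (acked.size() >= majority) {
            std::cout << "Ack update to owner after " << acked.size() << "/" << replicas << " replica acks\n";
            ackedUpstream = true;
            acked.clear();
        }
    }
}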
2025-09-25T16:17:52.601864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:52.601872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:52.601879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:52.601888Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.601916Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:52.601923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.601929Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2125], cookie# 100 2025-09-25T16:17:52.601952Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:52.601957Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-09-25T16:17:52.601966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-09-25T16:17:52.601973Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-09-25T16:17:52.601980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2128] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-09-25T16:17:52.602024Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:52.602051Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 2] Version: 3 }: sender# [1:98:2126], cookie# 100 2025-09-25T16:17:52.602061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2127], cookie# 100 2025-09-25T16:17:52.602067Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2125] Ack update: ack to# [1:72:2112], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-09-25T16:17:52.602133Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:100:2128], cookie# 100 2025-09-25T16:17:52.602140Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2125] Ack for unknown update (already acked?): sender# [1:100:2128], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> KqpSplit::IntersectionLosesRange+Descending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-09-25T16:17:46.221732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:46.259069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:46.262011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:46.262115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:46.262144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003702/r3tmp/tmp17p5Qd/pdisk_1.dat 2025-09-25T16:17:46.340974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:46.341015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:46.353068Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:46.354092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817065760552 != 1758817065760556 2025-09-25T16:17:46.385305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:46.436356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:46.473334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:46.564753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:46.590841Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:17:46.590921Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.610861Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.610938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.611131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:46.611143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:46.611152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:46.611223Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.611334Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2575] 2025-09-25T16:17:46.611375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.613425Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.613459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:711:2572] in generation 1 2025-09-25T16:17:46.613680Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.613701Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.613881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:17:46.613891Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:17:46.613899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:17:46.613961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.613979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.613987Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2575] in generation 1 2025-09-25T16:17:46.624341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.630133Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:46.630230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.630264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:17:46.630271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.630277Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:46.630283Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.630424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.630433Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:17:46.630445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.630454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:17:46.630458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:17:46.630462Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:17:46.630466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
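Earlier in this log the datashard persists and then enqueues CDC change records with a fixed set of fields (Order, Group, Step, TxId, Kind: CdcDataChange, body size, schema version). A rough, self-contained sketch of that record shape and the in-order queue it feeds, using hypothetical names rather than YDB's real structures, could be:

#include <cstdint>
#include <deque>
#include <iostream>
#include <string>

struct ChangeRecord {
    uint64_t order = 0;        // monotonically increasing per shard
    uint64_t group = 0;
    uint64_t step = 0;         // plan step of the originating write
    uint64_t txId = 0;
    std::string kind;          // e.g. "CdcDataChange"
    uint32_t bodySize = 0;     // serialized body size in bytes
    uint64_t schemaVersion = 0;
};

int main() {
    std::deque<ChangeRecord> queue;  // records waiting to be handed to the change sender
    // Values taken from the PersistChangeRecord entries visible above in this log.
    queue.push_back({1, 1758817071751825ull, 2000, 18446744073709551615ull, "CdcDataChange", 34, 2});
    queue.push_back({2, 1758817071783772ull, 2000, 18446744073709551615ull, "CdcDataChange", 50, 2});

    for (const auto& rec : queue)
        std::cout << "Enqueue Order: " << rec.order << " BodySize: " << rec.bodySize << "\n";
}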
2025-09-25T16:17:46.630591Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:46.630622Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:46.630660Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.630668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.630678Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:46.630685Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.630692Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:17:46.630703Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-09-25T16:17:46.630826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:685:2573], sessionId# [0:0:0] 2025-09-25T16:17:46.630834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:17:46.630838Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.630843Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-09-25T16:17:46.630848Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:17:46.630879Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:46.630945Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:46.630965Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:46.631047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [1:677:2569], serverId# [1:689:2576], sessionId# [0:0:0] 2025-09-25T16:17:46.631074Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:17:46.631100Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:17:46.631110Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-09-25T16:17:46.631387Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.631397Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:46.641779Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:46.641833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.642045Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:17:46.642061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.780020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, cl ... SHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:51.563866Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.564286Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.564618Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:51.564634Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:51.564806Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:51.567268Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:51.567323Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:17:51.567339Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-09-25T16:17:51.567346Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-09-25T16:17:51.567568Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.589022Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:51.643825Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:51.757370Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:51.757397Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-09-25T16:17:51.757653Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.757669Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:51.757681Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:51.757749Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-09-25T16:17:51.757788Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:51.757842Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.758047Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:51.804446Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-09-25T16:17:51.804480Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:51.804488Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:51.804518Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.804545Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:51.804562Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-09-25T16:17:51.804578Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.805195Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-09-25T16:17:51.805210Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:51.807287Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:880:2709], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.807315Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.807327Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.807525Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:895:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.807554Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:51.808628Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:51.810083Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.994469Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.995196Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:894:2717], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:52.020216Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:952:2756] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:52.038853Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tncny4bndj33bqp3gdp92, Database: , SessionId: ydb://session/3?node_id=4&id=MTU3OTM5YWItMjkwMTg5OWYtMjlmOTQ2NmYtYzIyMzJiZWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:52.039614Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:983:2773], serverId# [4:984:2774], sessionId# [0:0:0] 2025-09-25T16:17:52.039776Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-09-25T16:17:52.039889Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817072039845 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:52.039925Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-09-25T16:17:52.050880Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:52.050918Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.074534Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tncxm0jxr7e2s2d70qzms, Database: , SessionId: ydb://session/3?node_id=4&id=OTNhYmEyMmYtMzZmYzhiYS04YmEyMTUzNi01MjI0MDUyMQ==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:52.075333Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-09-25T16:17:52.075452Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817072075409 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:52.075491Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-09-25T16:17:52.085992Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:52.086023Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.086822Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1011:2792], serverId# [4:1012:2793], sessionId# [0:0:0] 2025-09-25T16:17:52.087828Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1013:2794], serverId# [4:1014:2795], sessionId# [0:0:0] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-09-25T16:17:45.733946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:45.823013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:45.826613Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:45.826715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:45.826743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003729/r3tmp/tmpsej5Op/pdisk_1.dat 2025-09-25T16:17:45.925857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.925911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:45.949044Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:45.950132Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817065207980 != 1758817065207984 2025-09-25T16:17:45.981497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:46.036339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:46.080692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:46.155700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:46.172175Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:46.172268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:46.183303Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:46.183361Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:46.183583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:46.183596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:46.183605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:46.183684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:46.183723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:46.183742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:46.194145Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:46.199911Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:46.200013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:46.200048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:46.200055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.200061Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:46.200067Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.200272Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:46.200303Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:46.200319Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.200327Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.200338Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:46.200344Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.200357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:46.200396Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:46.200467Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:46.200500Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:46.201032Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.213131Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:46.213180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:46.361268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:705:2583], serverId# [1:707:2585], sessionId# [0:0:0] 
2025-09-25T16:17:46.362260Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:46.362282Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.362504Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.362515Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:46.362540Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:46.362610Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-09-25T16:17:46.362646Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:46.362837Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:46.362854Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-09-25T16:17:46.363268Z node 1 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:17:46.363361Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:46.363694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:17:46.363704Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.363914Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:17:46.363926Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.364147Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:46.364157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:46.364163Z node 1 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:17:46.364178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2405], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:46.364188Z node 1 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:46.364199Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:46.365160Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:46.365646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:46.365663Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:46.365704Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 ... SHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:51.968958Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.969397Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.969718Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:51.969733Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:51.970849Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:51.973346Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:51.973391Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:17:51.973407Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-09-25T16:17:51.973413Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-09-25T16:17:51.973590Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.994852Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:52.053096Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:52.163317Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:52.163363Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-09-25T16:17:52.163722Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:52.163739Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:52.163753Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:52.163827Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-09-25T16:17:52.163867Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:52.163923Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:52.164146Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:52.205300Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-09-25T16:17:52.205342Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:52.205352Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:52.205367Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.205395Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:52.205412Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-09-25T16:17:52.205431Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.206085Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-09-25T16:17:52.206103Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:52.208069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:880:2709], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.208099Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.208112Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.208330Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:895:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.208362Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.209450Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:52.210927Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:52.389525Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:52.389989Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:894:2717], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:52.414186Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:952:2756] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:52.449546Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tnd2f2khmmxj1cbgxd1nz, Database: , SessionId: ydb://session/3?node_id=4&id=YTcwYzNhZWUtYzAxMzU1YTQtNjU2ZjU1Y2YtODFlODhmNzQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:52.450363Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:989:2778], serverId# [4:990:2779], sessionId# [0:0:0] 2025-09-25T16:17:52.450528Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-09-25T16:17:52.450657Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817072450608 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:52.450700Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-09-25T16:17:52.461189Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:52.461225Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.501868Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tndae2d3s5h734yxajy8w, Database: , SessionId: ydb://session/3?node_id=4&id=NzIxZjdmZGItYzg5YWQ5ZTEtODBmNjJiNmQtOGU4NTRiYTI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:52.502780Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-09-25T16:17:52.502902Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817072502855 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:52.502942Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-09-25T16:17:52.517314Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:52.517343Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:52.518089Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1017:2797], serverId# [4:1018:2798], sessionId# [0:0:0] 2025-09-25T16:17:52.519456Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1019:2799], serverId# [4:1020:2800], sessionId# [0:0:0] >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:17:35.557057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:17:35.557083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:35.557090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2025-09-25T16:17:35.557096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:17:35.557102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:17:35.557107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:17:35.557116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:35.557133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:17:35.557255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:17:35.557327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:17:35.580055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:17:35.580097Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:35.580206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:17:35.585205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:17:35.585316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:17:35.585358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:17:35.587797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:17:35.587897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:17:35.588067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.588348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:17:35.590193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:35.590257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:17:35.590547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:17:35.590557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:35.590596Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:17:35.590605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:17:35.590611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:17:35.590659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:17:35.595659Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:17:35.611042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:17:35.611144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.611214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:17:35.611222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:17:35.611274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:17:35.611288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:35.612188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.612252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:17:35.612307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.612318Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:17:35.612325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:17:35.612330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:17:35.612754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.612766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:17:35.612773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:17:35.613217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.613230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.613237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:17:35.613244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:17:35.613922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:17:35.617052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:17:35.617109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:17:35.617297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.617326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
e 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:17:51.655618Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:17:51.655639Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:17:51.655663Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:51.655669Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:212:2213], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-09-25T16:17:51.655675Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:212:2213], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-09-25T16:17:51.655679Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:212:2213], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2025-09-25T16:17:51.655747Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-09-25T16:17:51.655756Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-09-25T16:17:51.655772Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1006:0 progress is 1/1 2025-09-25T16:17:51.655777Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.655782Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1006:0 progress is 1/1 2025-09-25T16:17:51.655787Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.655793Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-09-25T16:17:51.655799Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-09-25T16:17:51.655805Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1006:0 2025-09-25T16:17:51.655810Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1006:0 2025-09-25T16:17:51.655824Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:17:51.655829Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-09-25T16:17:51.655836Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-09-25T16:17:51.655840Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-09-25T16:17:51.655847Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:17:51.655852Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-09-25T16:17:51.655926Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.655938Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.655943Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.655948Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-09-25T16:17:51.655953Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:17:51.656035Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:17:51.656042Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:17:51.656053Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-09-25T16:17:51.656125Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.656135Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.656140Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.656144Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 15 2025-09-25T16:17:51.656148Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:17:51.656209Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.656220Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.656226Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-09-25T16:17:51.656231Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:17:51.656235Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:17:51.656245Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-09-25T16:17:51.656683Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.657100Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:17:51.657119Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-09-25T16:17:51.657133Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-09-25T16:17:51.657205Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-09-25T16:17:51.657214Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-09-25T16:17:51.657292Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-09-25T16:17:51.657309Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-09-25T16:17:51.657314Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [50:449:2439] TestWaitNotification: OK eventTxId 1006 2025-09-25T16:17:51.657395Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:17:51.657426Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 42us result status StatusPathDoesNotExist 2025-09-25T16:17:51.657469Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScan::TopSort >> test.py::test[join-join_without_column--Results] [GOOD] >> KqpScan::NullInKey >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink |81.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> KqpScan::StreamLookupByPkPrefix >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> KqpSplit::IntersectionLosesRange+Descending [GOOD] >> KqpSplit::IntersectionLosesRange+Unspecified >> TCmsTest::WalleTasks >> KqpScan::Grep [GOOD] >> KqpScan::GrepByString >> test.py::test[produce-reduce_all_expr-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> KqpScan::SecondaryIndex >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink >> KqpScan::TopSort [GOOD] >> KqpScan::TooManyComputeActors >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::InsertImmediate >> KqpPointConsolidation::TasksCount >> KqpScan::NullInKey [GOOD] >> KqpScan::NullInKeySuffix >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> KqpScan::Join3 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-09-25T16:17:46.696680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:46.726089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:46.728890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:46.728983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:46.729010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0036fb/r3tmp/tmpaohZPW/pdisk_1.dat 2025-09-25T16:17:46.809801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:46.809837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:46.821365Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:46.822115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817066194702 != 1758817066194706 2025-09-25T16:17:46.853835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:46.906196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:46.952040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:47.027199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:47.044197Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:17:47.044266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:47.055211Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2574] 2025-09-25T16:17:47.055269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:47.056801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:47.056860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:47.057088Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:47.057099Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:47.057108Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:47.057171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:47.057214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:47.057228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:715:2572] in generation 1 2025-09-25T16:17:47.057305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:47.057320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:47.057439Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:17:47.057447Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:17:47.057454Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:17:47.057483Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:47.057495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:47.057503Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2574] in generation 1 2025-09-25T16:17:47.067877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:47.074645Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:47.074731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:47.074762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:17:47.074768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:47.074773Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:47.074780Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:47.074883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:47.074892Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:17:47.074902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:47.074911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:17:47.074918Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:17:47.074923Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:17:47.074927Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-09-25T16:17:47.075056Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:47.075080Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:47.075097Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:47.075104Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:47.075113Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:47.075118Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:47.075124Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:17:47.075134Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-09-25T16:17:47.075231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:688:2575], sessionId# [0:0:0] 2025-09-25T16:17:47.075239Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:17:47.075243Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:47.075248Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-09-25T16:17:47.075253Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:17:47.075278Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:47.075334Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:47.075351Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:47.075428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [1:677:2569], serverId# [1:689:2576], sessionId# [0:0:0] 2025-09-25T16:17:47.075465Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:17:47.075491Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:17:47.075501Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-09-25T16:17:47.075889Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:47.075904Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:47.089456Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:47.089516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:47.089725Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:17:47.089739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:47.236000Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, cl ... :17:52.684480Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:52.684496Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:52.684511Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-09-25T16:17:52.685669Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:52.685803Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:52.685820Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-09-25T16:17:52.685830Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:17:52.685962Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:52.685971Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:52.685997Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:17:52.686088Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-09-25T16:17:52.686097Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-09-25T16:17:52.688356Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:835:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.688380Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:845:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.688395Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.688558Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:850:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.688576Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:52.691110Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:52.692196Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:52.692221Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:52.692236Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-09-25T16:17:52.737087Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:52.844766Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:52.844815Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:17:52.844853Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-09-25T16:17:52.845601Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:849:2691], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:52.878720Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:923:2734] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:52.933561Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tndhg13cb7a9s77h6v9p9, Database: , SessionId: ydb://session/3?node_id=4&id=NjM3ZTE1YWYtZTgwODhlZGItYWZkNjlkMTQtNDMwMzQzODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:52.934637Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1034:2782], serverId# [4:1035:2783], sessionId# [0:0:0] 2025-09-25T16:17:52.934813Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-09-25T16:17:52.934924Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817072934876 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.934960Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817072934876 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.934982Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-09-25T16:17:52.947202Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:17:52.947241Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:52.967250Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tndsm82sbs377fxwsbfvs, Database: , SessionId: ydb://session/3?node_id=4&id=NThmODNjMGMtYWNlZWU5YWYtMjM0Njc2YjAtYjRhNDVkOTA=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:17:52.968031Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-09-25T16:17:52.968150Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 3 Group: 1758817072968101 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.968187Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 4 Group: 1758817072968101 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.968202Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 5 Group: 1758817072968101 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.968216Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 6 Group: 1758817072968101 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:17:52.968237Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-09-25T16:17:52.981501Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:17:52.981539Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:52.982852Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1083:2822], serverId# [4:1084:2823], sessionId# [0:0:0] 2025-09-25T16:17:52.984190Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1085:2824], serverId# [4:1086:2825], sessionId# [0:0:0] >> KqpSplit::ChoosePartition+Ascending >> KqpScan::AggregateCountStar >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> KqpScan::IsNull [GOOD] >> KqpPg::NoSelectFullScan [GOOD] >> 
DataShardWrite::UpsertPreparedManyTables-Volatile >> KqpPg::LongDomainName >> KqpScan::GrepRange >> KqpScan::LeftSemiJoinSimple >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> KqpScan::StreamLookupByPkPrefix [GOOD] >> KqpScan::StreamLookupByFullPk >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> DataShardWrite::DeletePrepared-Volatile >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] >> KqpScan::GrepByString [GOOD] >> KqpScan::GrepLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] Test command err: 2025-09-25T16:17:34.074442Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:34.076790Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:34.081876Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:34.081987Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:34.082938Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:34.083248Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:34.083277Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:34.083667Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:34.083731Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-09-25T16:17:34.083914Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:34.083937Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:34.085820Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:34.085880Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:34.085912Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:34.085945Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:34.109349Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:34.143159Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:34.143248Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.145337Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.145443Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:34.145448Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:34.145455Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:34.145458Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:34.145463Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: 
attempt# 0 2025-09-25T16:17:34.145481Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:34.148076Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } Group { GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } } } } Success: true 2025-09-25T16:17:34.231596Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.245886Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.245946Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:17:34.246055Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.275438Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:17:34.275539Z node 1 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:17:34.275645Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2025-09-25T16:17:34.319066Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:34.371807Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.371878Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 600000000 DefaultPermissionDuration: 660000000 TenantLimits { DisabledNodesLimit: 1 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 20 } InfoCollectionTimeout: 60000000 2025-09-25T16:17:34.371903Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:17:34.371955Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:34.371961Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] 
[Main] Start ConfigUpdater 2025-09-25T16:17:34.371976Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:34.371980Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:34.371991Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:34.372009Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:34.372118Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } Group { GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } } } } Success: true 2025-09-25T16:17:34.372256Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 600000000 DefaultPermissionDuration: 660000000 TenantLimits { DisabledNodesLimit: 1 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 20 } InfoCollectionTimeout: 60000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: true NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false } } } } 2025-09-25T16:17:34.591916Z node 2 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:34.593344Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:34.595822Z node 2 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:34.595864Z node 2 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:34.595906Z node 2 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:17:34.596016Z node 2 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:34.596625Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:34.596716Z node 2 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:34.597176Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:34.597332Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:34.597648Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:34.599396Z node 2 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:34.599428Z node 2 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:34.599471Z node 2 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:34.599499Z node 2 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:34.611350Z node 2 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:34.657705Z node 2 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:34.657811Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:34.657843Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:34.657933Z node 2 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:34.657941Z node 2 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:34.657949Z node 2 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:34.657954Z node 2 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:34.657963Z node 2 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:34.658003Z node 2 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster st ... 
LABILITY EvictVDisks: false Priority: -80 2025-09-25T16:17:51.973494Z node 10 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "11" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'11\': node state: \'Locked\'" } 2025-09-25T16:17:51.973505Z node 10 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 11, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-09-25T16:17:51.973547Z node 10 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:17:51.973570Z node 10 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-09-25T16:17:51.973579Z node 10 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (11) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-09-25T16:17:51.973590Z node 10 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:17:51.973631Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.418072Z, action# Type: RESTART_SERVICES Host: "11" Services: "storage" Duration: 60000000 2025-09-25T16:17:51.973644Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-09-25T16:17:51.989606Z node 10 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:17:51.989721Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "11" Services: "storage" Duration: 60000000 } Deadline: 180418072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 11 InterconnectPort: 12002 } } } } 2025-09-25T16:17:54.956433Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:54.957004Z node 18 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:54.959343Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:54.959417Z node 18 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:54.959799Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:54.959840Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:54.959866Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:54.960426Z node 18 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:54.960511Z node 18 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:54.960544Z node 18 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:17:54.960618Z node 18 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:54.961153Z node 18 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:54.961205Z node 18 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:54.961239Z node 18 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:54.961261Z node 18 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:54.973677Z node 18 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: false EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:55.009281Z node 18 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:55.009444Z node 18 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:55.009468Z node 18 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:55.009533Z node 18 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:55.009539Z node 18 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:55.009548Z node 18 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:55.009553Z node 18 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:55.009585Z node 18 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:55.009599Z node 18 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:55.009911Z node 18 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 18 PDiskId: 18 Path: "/18/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 19 Path: "/19/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 20 Path: "/20/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 21 Path: "/21/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 22 Path: "/22/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 23 Path: "/23/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 24 Path: "/24/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 25 PDiskId: 25 Path: "/25/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 
1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1000 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1001 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1001 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1001 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1002 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } 
VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } } } } Success: true 2025-09-25T16:17:55.097277Z node 18 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:55.117215Z node 18 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:55.117286Z node 18 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:17:55.117514Z node 18 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Unsupported: feature flag EnableCMSRequestPriorities is off" } } >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> DataShardWrite::InsertImmediate [GOOD] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> KqpScan::TooManyComputeActors [GOOD] >> DataShardWrite::UpdateImmediate >> KqpScan::NullInKeySuffix [GOOD] >> KqpPg::LongDomainName [GOOD] >> KqpScan::NoTruncate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: 2025-09-25T16:17:47.756196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:47.797592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:47.800372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:47.800470Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:47.800497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0036f5/r3tmp/tmpxYbfrI/pdisk_1.dat 2025-09-25T16:17:47.875017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:47.875060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:47.888382Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:47.889358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817067267925 != 1758817067267929 2025-09-25T16:17:47.921664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:47.972035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:48.020750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:48.097602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:48.113459Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:48.113609Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:48.123997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:48.124039Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:48.124203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:48.124211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:48.124217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:48.124260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:48.124279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:48.124290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:48.134709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:48.139360Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:48.139446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:48.139473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:48.139479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:48.139485Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:48.139492Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:48.139656Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:48.139679Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:48.139692Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:48.139699Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:48.139708Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:48.139713Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:48.139724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:48.139752Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:48.139803Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:48.139829Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:48.140303Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:48.152127Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:48.152167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:17:48.289514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:705:2583], serverId# [1:707:2585], sessionId# [0:0:0] 
2025-09-25T16:17:48.290507Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:48.290527Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:48.290760Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:48.290772Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:48.290783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:48.290848Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-09-25T16:17:48.290883Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:48.291063Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:48.291082Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-09-25T16:17:48.291421Z node 1 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:17:48.291487Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:48.291701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:17:48.291706Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:48.291859Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:17:48.291869Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:48.292011Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:48.292018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:48.292022Z node 1 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:17:48.292033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2405], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:48.292041Z node 1 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:17:48.292048Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:48.292766Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:48.293214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:17:48.293226Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:48.293254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 ... G: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:56.513164Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:56.513176Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:56.513197Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:56.513211Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-09-25T16:17:56.513226Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:56.513757Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-09-25T16:17:56.513772Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:56.515412Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:880:2709], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.515436Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2714], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.515447Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.515613Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:895:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.515636Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.516514Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:56.517996Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:56.683868Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:56.684552Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:894:2717], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:56.715241Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:952:2756] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:56.727889Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tnh93bvn25yy0r4pmn339, Database: , SessionId: ydb://session/3?node_id=4&id=NWZhZWYyNzItYjljZTM0ZTktYzkxNWEzZGQtYjllMmIzOWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:17:56.728617Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:983:2773], serverId# [4:984:2774], sessionId# [0:0:0] 2025-09-25T16:17:56.728750Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-09-25T16:17:56.728815Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817076728779 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:56.728871Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-09-25T16:17:56.739383Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-09-25T16:17:56.739413Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:56.746004Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:17:56.746614Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:56.746671Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2025-09-25T16:17:56.746687Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2025-09-25T16:17:56.746692Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2025-09-25T16:17:56.757157Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:56.865926Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned 
transaction txId 281474976715662 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:17:56.865950Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:56.866001Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:56.866007Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:17:56.866015Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715662] in PlanQueue unit at 72075186224037888 2025-09-25T16:17:56.866067Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715662 keys extracted: 0 2025-09-25T16:17:56.866090Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:17:56.866113Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:56.866126Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2025-09-25T16:17:56.866236Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1850: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 2500, txId# 281474976715662, at tablet# 72075186224037888 2025-09-25T16:17:56.866259Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 2500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-09-25T16:17:56.866323Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:56.866770Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-09-25T16:17:56.866782Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:56.867086Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:56.867098Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-09-25T16:17:56.867112Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:17:56.867122Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-09-25T16:17:56.867131Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037888, records: { 
Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-09-25T16:17:56.867134Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:56.867497Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-09-25T16:17:56.867510Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:17:56.868719Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1026:2811], serverId# [4:1027:2812], sessionId# [0:0:0] 2025-09-25T16:17:56.879935Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [4:1029:2814], serverId# [4:1030:2815], sessionId# [0:0:0] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 1866, MsgBus: 13354 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00400d/r3tmp/tmpdYGKr0/pdisk_1.dat 2025-09-25T16:17:54.009172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:54.009416Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061815459748706:2245];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:54.009433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:54.057548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:54.057587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:54.077295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:54.084024Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:54.085048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061811164781177:2081] 1758817073993724 != 1758817073993727 TServer::EnableGrpc on GrpcPort 1866, node 1 2025-09-25T16:17:54.209077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:54.209089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:54.209091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:54.209133Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:54.213774Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13354 TClient is connected to server localhost:13354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:54.332528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:54.337402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:54.350689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:54.397510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:54.495457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:54.536538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.008941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:55.087880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819754717423:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.087909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.087962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819754717433:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.087967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.186079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.197389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.207037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.220785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.237542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.255502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.272428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.289286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.312606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061819754718296:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.312625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.312740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819754718302:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.312795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819754718301:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09 ... KqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:56.310835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:56.311965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:56.320562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.344538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.376436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.389453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:56.687440Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061822513818337:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.687468Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.687561Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061822513818418:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.687568Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.698964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.717838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.729486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.740342Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.753612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.767780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.783615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.799500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.825466Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061822513819209:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.825491Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.825660Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061822513819214:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.825669Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061822513819215:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.825686Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.826581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:56.831157Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061822513819218:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:17:56.902243Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061822513819270:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:57.217070Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:57.249941Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tnhxx50b6cg38z3pxqqmp, Database: , SessionId: ydb://session/3?node_id=2&id=YmJjNDVjM2UtZmViNzNiOGMtYTA0NDBhMGItN2E3ZTE3NTM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-09-25T16:17:57.271610Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817077290, txId: 281474976710673] shutting down >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> KqpPointConsolidation::TasksCount [GOOD] >> KqpPointConsolidation::ReadRanges >> KqpScan::RemoteShardScan >> KqpScan::GrepLimit [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> KqpScan::StreamLookupByFullPk [GOOD] |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TooManyComputeActors [GOOD] Test command err: Trying to start YDB, gRPC: 15868, MsgBus: 2051 2025-09-25T16:17:55.234045Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061819682816750:2213];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:55.234070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:55.248447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004001/r3tmp/tmpSppvHJ/pdisk_1.dat 2025-09-25T16:17:55.298568Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:55.300295Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061819682816562:2081] 1758817075229226 != 1758817075229229 TServer::EnableGrpc on GrpcPort 15868, node 1 2025-09-25T16:17:55.316159Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:55.316169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:55.316171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:55.316212Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:55.348804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:55.348846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:55.353297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2051 2025-09-25T16:17:55.410868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:55.427906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:55.438398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.472521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:55.507003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.527824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.693601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819682818218:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.693624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.693806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819682818228:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.693814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.752046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.767235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.781557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.795005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.805347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.817499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.830732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.844734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.866416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061819682819091:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.866442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.866560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819682819097:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.866636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819682819096:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.866643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.867464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__oper ... true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:56.705629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:56.707207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:56.726812Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:56.766198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.780276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.822005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:56.837748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.009611Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061822456163387:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.009664Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.009840Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061826751130705:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.009859Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.014662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.023191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.033017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.047251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.066160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.076957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.089165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.152030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.180113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061826751131564:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.180137Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061826751131569:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.180146Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.180336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061826751131572:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.180344Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.181066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:57.186036Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061826751131571:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:17:57.280941Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061826751131625:3560] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:57.542088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.571611Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:57.781098Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=2&id=MzI5Mzg4ZTItNTgyMmI5YTQtMjc3MjA2OTktNzllMDkxZA==, ActorId: [2:7554061826751132159:2544], ActorState: ExecuteState, TraceId: 01k60tnjawc770n6etqr3ecv0b, Create QueryResponse for error on request, msg: 2025-09-25T16:17:57.782190Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817077822, txId: 281474976715675] shutting down
: Warning: Type annotation, code: 1030
:7:13: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:8:18: Warning: At function: AssumeColumnOrderPartial, At function: Aggregate, At function: Filter, At lambda, At function: Coalesce
:9:67: Warning: At function: And
:9:39: Warning: At function: <
:9:46: Warning: At function: -
:9:46: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
: Error: Requested too many execution units: 21, code: 2029 |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> KqpScan::GrepRange [GOOD] >> KqpScan::GrepNonKeyColumns >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestLogOwerwrite >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> KqpScan::AggregateCountStar [GOOD] >> KqpScan::AggregateEmptyCountStar >> KqpScan::ScanDuringSplit10 >> KqpScan::LeftSemiJoinSimple [GOOD] >> KqpScan::JoinWithParams >> KqpScan::SecondaryIndex [GOOD] >> KqpScan::SecondaryIndexCustomColumnOrder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 8612, MsgBus: 28284 2025-09-25T16:17:34.620222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061726887989198:2252];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:34.620280Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:34.629646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005550/r3tmp/tmpTf3TL8/pdisk_1.dat 2025-09-25T16:17:34.676520Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8612, node 1 2025-09-25T16:17:34.689455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:34.689467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:34.689469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:34.689508Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28284 2025-09-25T16:17:34.724535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:34.724565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:34.728760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28284 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:17:34.753797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:34.771626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:34.775484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:35.085406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731182956952:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.085410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731182956941:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.085427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.085496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061731182956956:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.085507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.086205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:35.088535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061731182956955:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:17:35.150857Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061731182957008:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 28980, MsgBus: 2444 2025-09-25T16:17:35.606246Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554061735138932053:2154];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:35.606311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:35.608296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005550/r3tmp/tmp0Jef6o/pdisk_1.dat 2025-09-25T16:17:35.621053Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28980, node 2 2025-09-25T16:17:35.633561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:35.633577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:35.633579Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:35.633633Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2444 TClient is connected to server localhost:2444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:35.707823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:35.707865Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:35.709025Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:35.709246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:35.848506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:36.031961Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061739433899894:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.031984Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061739433899883:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.032052Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:36.032853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/ ... Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:56.470817Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061822245031774:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:56.478892Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6123, MsgBus: 62373 2025-09-25T16:17:57.266972Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7554061826697983935:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.268082Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:57.284362Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005550/r3tmp/tmp0ZwFFE/pdisk_1.dat 2025-09-25T16:17:57.347029Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6123, node 11 2025-09-25T16:17:57.355928Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.355941Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.355943Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.355986Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:57.397021Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.397053Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.398017Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62373 TClient is connected to server localhost:62373 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 
2025-09-25T16:17:57.483133Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2025-09-25T16:17:57.485640Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.497504Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:57.745505Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061826697984548:2314], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.745527Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.745735Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061826697984569:2318], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.745746Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061826697984570:2319], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.745769Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.746671Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:57.749628Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:17:57.749701Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7554061826697984573:2320], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:57.842137Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7554061826697984624:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:57.847233Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 26677, MsgBus: 15143 2025-09-25T16:17:34.731774Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061728476617458:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:34.733116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554f/r3tmp/tmpwzZAa9/pdisk_1.dat 2025-09-25T16:17:34.786315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:34.788774Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26677, node 1 2025-09-25T16:17:34.805787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:34.805803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:34.805806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:34.805862Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15143 2025-09-25T16:17:34.838578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:34.838615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:34.839538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15143 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:34.869168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 16 2025-09-25T16:17:35.029686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:35.267875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.298987Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.300035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.315644Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.319120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061732771585533:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.319152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.319260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061732771585546:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.319280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061732771585545:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.319287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.320251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:35.322561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-09-25T16:17:35.322612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061732771585549:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-09-25T16:17:35.413251Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061732771585600:2447] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } f f t t 18 2025-09-25T16:17:35.467216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.480091Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.481261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.491129Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-09-25T16:17:35.553463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.565726Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.566776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.576694Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-09-25T16:17:35.644132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.669137Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.670017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.680502Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-09-25T16:17:35.733928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-09-25T16:17:35.747761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:35.760123Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send ... 7:56.492208Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:56.492249Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:56.493122Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:56.561430Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:56.808504Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061824554150923:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.808539Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.808720Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061824554150960:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.808730Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554061824554150959:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.808741Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.809644Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:56.812592Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-09-25T16:17:56.812671Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7554061824554150963:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:17:56.909739Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7554061824554151014:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:56.915996Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.996744Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) Trying to start YDB, gRPC: 65163, MsgBus: 5949 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00554f/r3tmp/tmp6VMO8T/pdisk_1.dat 2025-09-25T16:17:57.487043Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:57.487114Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:57.508282Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.508321Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.509544Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:57.512749Z node 12 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65163, node 12 2025-09-25T16:17:57.523150Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.523162Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.523165Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.523220Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5949 TClient is connected to server localhost:5949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.601293Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.659388Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:57.946018Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7554061825664248149:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.946127Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7554061825664248135:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.946145Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.947054Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:57.947359Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7554061825664248192:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.947376Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.947615Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7554061825664248195:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.947639Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.949419Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7554061825664248165:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:17:58.017834Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7554061829959215516:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:58.028949Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-09-25T16:17:44.239958Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061770870286218:2154];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:44.239989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041bf/r3tmp/tmp3TTLRg/pdisk_1.dat 2025-09-25T16:17:44.293025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:44.316198Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32165, node 1 2025-09-25T16:17:44.339966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:44.339981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:44.339983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:44.340034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:44.340703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:44.340717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:44.342328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15391 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:44.371132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:44.408211Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1859: Ticket 64F16AD092B72C9738B040B2C6EA483DCAEFC92188FD0148250D18A08C69016F (ipv6:[::1]:33436) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-09-25T16:17:44.408299Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket 64F16AD092B72C9738B040B2C6EA483DCAEFC92188FD0148250D18A08C69016F: Cannot create token from certificate. 
Client certificate failed verification 2025-09-25T16:17:44.439380Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1828: Ticket **** (B6C6F477) (ipv6:[::1]:33444) has now valid token of root@builtin 2025-09-25T16:17:44.491841Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:797: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-09-25T16:17:44.491867Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:802: CanInitLoginToken, target database candidates(1): /Root 2025-09-25T16:17:44.491870Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:855: CanInitLoginToken, database /Root, A6 error 2025-09-25T16:17:44.491888Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (0C093832): Could not find correct token validator 2025-09-25T16:17:45.250053Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061777045631041:2165];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:45.250087Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041bf/r3tmp/tmpFRP2Ek/pdisk_1.dat 2025-09-25T16:17:45.271301Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:45.290790Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6866, node 4 2025-09-25T16:17:45.317263Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:45.317280Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:45.317283Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:45.317347Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:45.350618Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:45.350663Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:45.353635Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:45.366922Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:45.484690Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1859: Ticket 64F16AD092B72C9738B040B2C6EA483DCAEFC92188FD0148250D18A08C69016F (ipv6:[::1]:58378) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-09-25T16:17:45.484782Z node 4 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket 64F16AD092B72C9738B040B2C6EA483DCAEFC92188FD0148250D18A08C69016F: Cannot create token from certificate. Client certificate failed verification 2025-09-25T16:17:45.513730Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:45.537827Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1828: Ticket **** (B6C6F477) (ipv6:[::1]:58386) has now valid token of root@builtin 2025-09-25T16:17:45.581334Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:797: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-09-25T16:17:45.581353Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:802: CanInitLoginToken, target database candidates(1): /Root 2025-09-25T16:17:45.581355Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:855: CanInitLoginToken, database /Root, A6 error 2025-09-25T16:17:45.581371Z node 4 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (0C093832): Could not find correct token validator 2025-09-25T16:17:46.406484Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061780815601268:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:46.406517Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041bf/r3tmp/tmpLFBJas/pdisk_1.dat 2025-09-25T16:17:46.425901Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:46.440983Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32741, node 7 2025-09-25T16:17:46.466233Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:46.466244Z node 7 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:46.466245Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:46.466296Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ... empty maybe) 2025-09-25T16:17:52.349258Z node 19 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:52.382215Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:17:52.525179Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:52.609284Z node 19 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1828: Ticket **** (B6C6F477) (ipv6:[::1]:33062) has now valid token of root@builtin 2025-09-25T16:17:52.748199Z node 19 :TICKET_PARSER TRACE: ticket_parser_impl.h:797: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-09-25T16:17:52.748218Z node 19 :TICKET_PARSER TRACE: ticket_parser_impl.h:802: CanInitLoginToken, target database candidates(1): /Root 2025-09-25T16:17:52.748221Z node 19 :TICKET_PARSER TRACE: ticket_parser_impl.h:855: CanInitLoginToken, database /Root, A6 error 2025-09-25T16:17:52.748236Z node 19 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (0C093832): Could not find correct token validator 2025-09-25T16:17:55.558163Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7554061817201458144:2260];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:55.558273Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:55.570284Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041bf/r3tmp/tmpw6LwvW/pdisk_1.dat 2025-09-25T16:17:55.671453Z node 22 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:55.692348Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:55.692376Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:55.695506Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13475, node 22 2025-09-25T16:17:55.769117Z node 22 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:55.769128Z node 22 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:55.769131Z node 22 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:55.769183Z node 22 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:55.794862Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20469 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:55.810711Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:55.983893Z node 22 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1828: Ticket **** (B6C6F477) (ipv6:[::1]:37300) has now valid token of root@builtin 2025-09-25T16:17:56.139872Z node 22 :TICKET_PARSER TRACE: ticket_parser_impl.h:797: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-09-25T16:17:56.139889Z node 22 :TICKET_PARSER TRACE: ticket_parser_impl.h:802: CanInitLoginToken, target database candidates(1): /Root 2025-09-25T16:17:56.139893Z node 22 :TICKET_PARSER TRACE: ticket_parser_impl.h:855: CanInitLoginToken, database /Root, A6 error 2025-09-25T16:17:56.139909Z node 22 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (0C093832): Could not find correct token validator 2025-09-25T16:17:57.641412Z node 25 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7554061829254060025:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.641435Z node 25 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041bf/r3tmp/tmpOGICO7/pdisk_1.dat 2025-09-25T16:17:57.652876Z node 25 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:57.673225Z node 25 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24137, node 25 2025-09-25T16:17:57.721200Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.721213Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.721215Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.721274Z node 25 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10908 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:57.744767Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.744801Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.746411Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.747292Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.805447Z node 25 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:57.867627Z node 25 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1828: Ticket DE8B5FDC864CC8F285D019BA80B0453BCAE8519E0BAE27B30D074B061E5EFAA4 (ipv6:[::1]:36342) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-09-25T16:17:57.960770Z node 25 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (717F937C): Unknown token 2025-09-25T16:17:58.029144Z node 25 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1859: Ticket 49F75EEBE4600CC339EDA1C17AE5071604C1841E9B61725F50332A28A59ADC14 (ipv6:[::1]:36376) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-09-25T16:17:58.029219Z node 25 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket 49F75EEBE4600CC339EDA1C17AE5071604C1841E9B61725F50332A28A59ADC14: Cannot create token from certificate. 
Client certificate failed verification >> KqpSplit::ChoosePartition+Ascending [GOOD] >> KqpSplit::ChoosePartition+Descending >> TYardTest::TestLogOwerwrite [GOOD] >> KqpScan::NoTruncate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepLimit [GOOD] Test command err: Trying to start YDB, gRPC: 9069, MsgBus: 17180 2025-09-25T16:17:53.846148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:53.846613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061810926293133:2256];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:53.850222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004006/r3tmp/tmp40Cc8q/pdisk_1.dat 2025-09-25T16:17:54.048898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061810926292901:2081] 1758817073829969 != 1758817073829972 2025-09-25T16:17:54.054316Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:54.060733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:54.060769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:54.061669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:54.074644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 9069, node 1 2025-09-25T16:17:54.109380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:54.109392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:54.109394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:54.109436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17180 TClient is connected to server localhost:17180 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:54.321145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:54.330136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:54.342284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:54.402668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:54.528034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:54.567708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:54.832986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:55.064854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819516229160:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.064878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.065038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819516229169:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.065044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.176395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.187357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.199870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.216431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.228703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.246407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.279370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.309843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.411774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061819516230034:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.411798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.411879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819516230039:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.411890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061819516230040:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09 ... on [3:7554061828325355528:2081] 1758817077623483 != 1758817077623486 TServer::EnableGrpc on GrpcPort 9170, node 3 2025-09-25T16:17:57.661436Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.661451Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.661453Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.661499Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19529 TClient is connected to server localhost:19529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.738888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.740068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:57.769886Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.785322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:57.811778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.832981Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.889227Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:58.029134Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061832620324466:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.029183Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.030872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061832620324484:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.030939Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.039629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.050293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.062328Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.075873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.090137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.103871Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.118382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.132326Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.153758Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061832620325340:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.153786Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.153789Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061832620325345:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.153816Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061832620325347:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.153822Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.154456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:58.159123Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061832620325349:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:17:58.225798Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061832620325401:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:58.551715Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817078592, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupByFullPk [GOOD] Test command err: Trying to start YDB, gRPC: 28869, MsgBus: 18876 2025-09-25T16:17:55.739863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061819416496519:2211];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:55.739900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:55.741248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ff9/r3tmp/tmp0UxNQ0/pdisk_1.dat 2025-09-25T16:17:55.764797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:55.764846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:55.765962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:55.781196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061819416496344:2081] 1758817075737481 != 1758817075737484 2025-09-25T16:17:55.782053Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28869, node 1 2025-09-25T16:17:55.795532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:55.795543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:55.795545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:55.795588Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18876 TClient is connected to server localhost:18876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:17:55.863563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:55.873047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:55.877162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:55.889620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.921696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:17:55.953729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.978464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:56.154863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061823711465280:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.154889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.155053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061823711465290:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.155059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.202686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.212850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.236336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.249713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.268344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.283583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.293805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.308209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.325467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061823711466154:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.325497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.325539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061823711466160:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.325544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061823711466159:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.325547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, st ... hId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:57.469394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:57.472185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:57.481099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.508754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.559948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.578206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.601574Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:57.840338Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061827070711937:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.840443Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.846701Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061827070712026:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.846741Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.846849Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061827070712028:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.846855Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.847207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.856758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.867880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.880648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.900281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.919210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.937570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.954516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.974864Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061827070712814:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.974889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.974935Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061827070712819:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.974942Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061827070712820:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.975107Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.975882Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:57.984470Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061827070712823:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:17:58.075843Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061831365680171:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:58.290057Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:58.298428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.358576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.493010Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817078529, txId: 281474976715677] shutting down >> KqpScan::ScanRetryRead >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> test.py::test[join-bush_in_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in--Results] >> KqpScan::EarlyFinish >> KqpScan::GrepNonKeyColumns [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> KqpPointConsolidation::ReadRanges [GOOD] >> KqpScan::AggregateByColumn >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::NoTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 11065, MsgBus: 11198 2025-09-25T16:17:55.245086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061818871065359:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:55.245113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004000/r3tmp/tmp7wCQ5z/pdisk_1.dat 2025-09-25T16:17:55.300992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11065, node 1 2025-09-25T16:17:55.314867Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:55.329004Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:55.329017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:55.329019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:55.329061Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:55.345465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:55.345493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:55.349158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11198 TClient is connected to server localhost:11198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:55.433821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:55.442193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:55.463595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:55.489990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
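The KQP_WORKLOAD_SERVICE warnings above and below split each issue across two lines ("... issues: {" followed by ": Error: Resource pool default not found ..."). A small Python sketch, illustrative only, for pulling the failed pool-info fetches out of such output; the pattern and function name are assumptions based solely on the records shown here.

import re

# Matches the TPoolFetcherActor warnings seen in this log, e.g.
# "node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService]
#  [TPoolFetcherActor] ActorId: [...], DatabaseId: /Root, PoolId: default,
#  Failed to fetch pool info, NOT_FOUND, issues: { : Error: ... }"
POOL_FETCH_WARN = re.compile(
    r"node (?P<node>\d+) :KQP_WORKLOAD_SERVICE WARN: \S+ "
    r"\[WorkloadService\] \[TPoolFetcherActor\] ActorId: \S+ "
    r"DatabaseId: (?P<db>[^,]+), PoolId: (?P<pool>\w+), "
    r"Failed to fetch pool info, (?P<status>\w+)"
)

def pool_fetch_failures(log_text: str):
    """Yield (node, database, pool, status) for each failed pool-info fetch."""
    flat = " ".join(log_text.split())  # rejoin records wrapped across lines
    for m in POOL_FETCH_WARN.finditer(flat):
        yield m.group("node"), m.group("db"), m.group("pool"), m.group("status")

In the runs above every such tuple refers to PoolId default and appears before the ESchemeOpCreateResourcePool operation that later creates it, which is why the warnings stop once the pool exists.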
2025-09-25T16:17:55.530289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.558061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.577876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.753273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818871066947:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.753302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.753490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818871066957:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.753502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.813474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.820987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.828217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.835499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.850384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.865651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.878523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.937938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.957951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061818871067825:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.957979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.957981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818871067830:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.958033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818871067832:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.958041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.959155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... ROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15482 TClient is connected to server localhost:15482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:58.298942Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:58.300450Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:58.305389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.325790Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.355902Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:58.370139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.445073Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:58.728208Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061833433507021:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.728347Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.730872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061833433507103:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.730895Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.732603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.747201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.758122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.769848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.784665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.801763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.817258Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.827904Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.847222Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061833433507895:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.847266Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.847348Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061833433507900:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.847362Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061833433507901:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.847374Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.848280Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:58.851196Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061833433507904:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:17:58.915616Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061833433507956:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:59.138043Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.190384Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817079229, txId: 281474976710675] shutting down 2025-09-25T16:17:59.209445Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD] Test command err: 2025-09-25T16:14:43.703670Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.713300Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-09-25T16:14:43.717620Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:436} Magic sector is present on disk, now going to format device PDiskId# 1 2025-09-25T16:14:43.852379Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.852403Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:374} Device formatting done PDiskId# 1 2025-09-25T16:14:43.871912Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 2617087746192085064 MagicLogChunk: 5249236789394756428 MagicDataChunk: 17458274644603686744 MagicSysLogChunk: 14524879979807912101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883770012 (2025-09-25T16:14:43.770012Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.876892Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:43.884900Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 
OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:14:43.885105Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:43.889129Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:43.900979Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1145172 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:14:43.959612Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:43.988961Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 2617087746192085064 MagicLogChunk: 5249236789394756428 MagicDataChunk: 17458274644603686744 MagicSysLogChunk: 14524879979807912101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883770012 (2025-09-25T16:14:43.770012Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:43.993284Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1186675 NonceLog# 1145172 NonceData# 1694964} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:43.999641Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:43.999669Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-09-25T16:14:43.999689Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.000093Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.061034Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:14:44.090959Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup 
Path# "" PDiskId# 1 2025-09-25T16:14:44.118694Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 2617087746192085064 MagicLogChunk: 5249236789394756428 MagicDataChunk: 17458274644603686744 MagicSysLogChunk: 14524879979807912101 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816883770012 (2025-09-25T16:14:43.770012Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.124975Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 2250478 NonceLog# 2226631 NonceData# 3261806} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:44.128807Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:44.128842Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2025-09-25T16:14:44.128864Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.130394Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.262628Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.291610Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:302} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-09-25T16:14:44.304935Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:436} Magic sector is present on disk, now going to format device PDiskId# 1 2025-09-25T16:14:44.448594Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.448620Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:374} Device formatting done PDiskId# 1 2025-09-25T16:14:44.474965Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 15536143411760147693 MagicLogChunk: 11655661665833248233 MagicDataChunk: 7636224432948302855 MagicSysLogChunk: 14175108390659494928 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 
1758816884353281 (2025-09-25T16:14:44.353281Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.482927Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:14:44.522529Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:14:44.548925Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 15536143411760147693 MagicLogChunk: 11655661665833248233 MagicDataChunk: 7636224432948302855 MagicSysLogChunk: 14175108390659494928 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758816884353281 (2025-09-25T16:14:44.353281Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:14:44.560925Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1713200 NonceLog# 1241036 NonceData# 1633688} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:14:44.572894Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:710} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-09-25T16:14:44.572936Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-09-25T16:14:44.572960Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:14:44.573376Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:14:44.646272Z :BS_PDISK NOTICE: {BPD38@blobstorage ... 
1@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11628108020155661544 MagicNextLogChunkReference: 11821073586475933578 MagicLogChunk: 17846099668701598015 MagicDataChunk: 595237158825080579 MagicSysLogChunk: 4352052559022631811 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758817065638260 (2025-09-25T16:17:45.638260Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:17:58.565934Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 113552122 NonceLog# 115798063 NonceData# 115585523} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1725906 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:17:58.573071Z :BS_PDISK WARN: {BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1713235 LastNonce# 115798067 MaxNonce# 0 sectorOffset# 2416640 PDiskId# 1 2025-09-25T16:17:58.573102Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 78 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 319488} PDiskId# 1 2025-09-25T16:17:58.573130Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 319488} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:17:58.573520Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:17:58.695743Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:17:58.696123Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11628108020155661544 MagicNextLogChunkReference: 11821073586475933578 MagicLogChunk: 17846099668701598015 MagicDataChunk: 595237158825080579 MagicSysLogChunk: 4352052559022631811 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758817065638260 (2025-09-25T16:17:45.638260Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:17:58.697746Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 115231422 NonceLog# 116983163 NonceData# 116905514} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1725906 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:17:58.701086Z :BS_PDISK WARN: {BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce 
reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1713241 LastNonce# 116983167 MaxNonce# 0 sectorOffset# 2441216 PDiskId# 1 2025-09-25T16:17:58.701158Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 84 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 344064} PDiskId# 1 2025-09-25T16:17:58.701188Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 344064} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:17:58.702745Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:17:58.797125Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:17:58.847006Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:17:58.847232Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11628108020155661544 MagicNextLogChunkReference: 11821073586475933578 MagicLogChunk: 17846099668701598015 MagicDataChunk: 595237158825080579 MagicSysLogChunk: 4352052559022631811 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758817065638260 (2025-09-25T16:17:45.638260Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:17:58.848326Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 116966032 NonceLog# 118434817 NonceData# 118118917} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1725906 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:17:58.850742Z :BS_PDISK WARN: {BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1713247 LastNonce# 118434821 MaxNonce# 0 sectorOffset# 2465792 PDiskId# 1 2025-09-25T16:17:58.850780Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 90 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 368640} PDiskId# 1 2025-09-25T16:17:58.850805Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 368640} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 
2025-09-25T16:17:58.851136Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:17:58.945916Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 2025-09-25T16:17:59.136378Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:17:59.136590Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 6549208951486792753 MagicNextLogChunkReference: 1192938544992682601 MagicLogChunk: 16164297392005908824 MagicDataChunk: 3338497678556316968 MagicSysLogChunk: 12046362733182431048 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758817079108906 (2025-09-25T16:17:59.108906Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:17:59.137644Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-09-25T16:17:59.138139Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-09-25T16:17:59.138160Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:17:59.138408Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:17:59.235776Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:2055} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1864792 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-09-25T16:17:59.278627Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2857} OnDriveStartup Path# "" PDiskId# 1 2025-09-25T16:17:59.278910Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:581} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 6549208951486792753 MagicNextLogChunkReference: 1192938544992682601 MagicLogChunk: 16164297392005908824 MagicDataChunk: 3338497678556316968 MagicSysLogChunk: 12046362733182431048 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1758817079108906 (2025-09-25T16:17:59.108906Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-09-25T16:17:59.281180Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 8 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1428566 NonceLog# 1868397 NonceData# 1742066} LogHeadChunkIdx# 6 LogHeadChunkPreviousNonce# 1867340 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-09-25T16:17:59.286365Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:809} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 8 SectorIdx# 36 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 8 OffsetInChunk# 147456} PDiskId# 1 2025-09-25T16:17:59.286402Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 8 OffsetInChunk# 147456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-09-25T16:17:59.286914Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1732} PDisk have successfully started PDiskId# 1 2025-09-25T16:17:59.377251Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1930} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 GroupSizeInUnits# 0 PDiskId# 1 >> KqpScan::JoinWithParams [GOOD] >> KqpScan::LMapFunction >> KqpScan::AggregateEmptyCountStar [GOOD] >> KqpScan::AggregateEmptySum >> KqpScan::Join3 [GOOD] >> KqpScan::Join3TablesNoRemap >> KqpScan::Limit >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit >> KqpSplit::ChoosePartition+Descending [GOOD] >> KqpScan::UnionBasic >> test.py::test[pg-tpch-q14-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepNonKeyColumns [GOOD] Test command err: Trying to start YDB, gRPC: 22026, MsgBus: 26484 2025-09-25T16:17:53.946518Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061809622769826:2224];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:53.946802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:53.983038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004064/r3tmp/tmpmucNb5/pdisk_1.dat 2025-09-25T16:17:54.449097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:54.449122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:54.457428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:54.514570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:54.536050Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:54.539066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061809622769639:2081] 1758817073915892 != 1758817073915895 TServer::EnableGrpc on GrpcPort 22026, node 1 2025-09-25T16:17:54.673352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:54.673366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:54.673369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:54.673416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:54.808771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:54.948940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26484 TClient is connected to server localhost:26484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:55.443909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-09-25T16:17:55.509606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.542516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:55.575572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:17:55.595144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:55.958342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818212705895:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.958432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.958639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061818212705905:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:55.958657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.026498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.039231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.050409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.060180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.074731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.090120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.111198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.128435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:56.164153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061822507674063:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.164186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.164299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061822507674068:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:56.164320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061822507674069:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource po ... .cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16760, node 3 2025-09-25T16:17:58.897813Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:58.897830Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:58.897832Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:58.897886Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15593 TClient is connected to server localhost:15593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:58.946882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:58.956401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.970347Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.990704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:59.004839Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:59.171735Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:59.311971Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061835153123902:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.312088Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.315868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.316187Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061835153123985:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.316207Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.328145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.338056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.350767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.367933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.377778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.392584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.410705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.433689Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061835153124775:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.433728Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.434076Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061835153124780:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.434096Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061835153124781:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.434121Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.438292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:59.441612Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061835153124784:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:17:59.494213Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061835153124836:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:59.862756Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817079901, txId: 281474976710673] shutting down 2025-09-25T16:17:59.871391Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpScan::SecondaryIndexCustomColumnOrder [GOOD] >> KqpScan::SelectExistsUnexpected >> test.py::test[pg-tpch-q14-default.txt-Results] >> KqpScan::MultipleResults >> KqpScan::TwoAggregatesTwoWindows >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> KqpScan::JoinSimple >> KqpSplit::UndeliveryOnFinishedRead >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> KqpScan::LMapFunction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::ChoosePartition+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 6166, MsgBus: 12128 2025-09-25T16:17:57.112038Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061826286022559:2244];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.112094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fdb/r3tmp/tmpKT8BXg/pdisk_1.dat 2025-09-25T16:17:57.129315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:57.322635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:57.326226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.326263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.360905Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:57.362335Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061826286022352:2081] 1758817077109130 != 1758817077109133 2025-09-25T16:17:57.362985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6166, node 1 2025-09-25T16:17:57.425485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.425498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.425500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.425534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12128 TClient is connected to server localhost:12128 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:57.557002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.581637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.589358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:57.665674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.703352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:57.738490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:17:57.757321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.059625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061830580991291:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.059647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.059787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061830580991301:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.059800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.111996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:58.151254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.162802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.175394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.189370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.217061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.234007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.260086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.277681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.297026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061830580992171:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.297054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.297079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061830580992176:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.297203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: ... lient is connected to server localhost:3430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:59.626575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:59.636499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:59.660017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:59.685490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:59.697767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:59.782663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:59.903695Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061834450857977:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.903721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.903934Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061834450857987:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.903945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.918248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.937125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.948207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.958399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.973752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:59.988106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.000723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.015731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.045902Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061838745826146:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.045931Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061838745826151:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.045932Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.045977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061838745826153:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.045984Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.046896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:00.050376Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061838745826154:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:00.106772Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061838745826207:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:00.431422Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tnn2a75m2ecr9kd9jet1s, Database: , SessionId: ydb://session/3?node_id=2&id=ZTAwNjgzZTAtYTNjMjY4YTAtMzgyZGY3ZjktNTBkODg5OTg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:00.537554Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:00.794765Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817080475, txId: 281474976710673] shutting down >> test.py::test[produce-reduce_lambda_list_table--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[blocks-decimal_comparison--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile >> KqpScan::SingleKey >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] >> KqpScan::AggregateEmptySum [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> KqpScan::AggregateByColumn [GOOD] >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> KqpSplit::AfterResult+Unspecified ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::LMapFunction [GOOD] Test command err: Trying to start YDB, gRPC: 14471, MsgBus: 26097 2025-09-25T16:17:57.551585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061827657814175:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.551768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fb9/r3tmp/tmpmGzE9w/pdisk_1.dat 2025-09-25T16:17:57.561538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:57.643643Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table 
profiles were not loaded 2025-09-25T16:17:57.659237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.659264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.663866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14471, node 1 2025-09-25T16:17:57.684989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.685001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.685003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.685042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:57.689466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26097 TClient is connected to server localhost:26097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:57.809570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:57.829650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:17:57.865336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.889866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.907132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.149459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061831952783060:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.149490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.149642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061831952783070:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.149661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.193551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.202037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.216658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.248523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.263151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.281136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.293415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.307628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.329450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061831952783931:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.329476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.329582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061831952783936:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.329595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061831952783937:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.329652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.330618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB c ... ess permissions } 2025-09-25T16:17:59.895448Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.895647Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061836373124980:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.895657Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061836373124981:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.895663Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:59.896764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:59.900008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-09-25T16:17:59.900238Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061836373124984:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:17:59.984278Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061836373125036:3560] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:00.330704Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817080370, txId: 281474976715673] shutting down 2025-09-25T16:18:00.343482Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:00.407685Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817080440, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 30319, MsgBus: 4724 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fb9/r3tmp/tmp6mS9pA/pdisk_1.dat 2025-09-25T16:18:00.744880Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:00.744985Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:00.754861Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:7554061840693495533:2081] 1758817080727922 != 1758817080727925 2025-09-25T16:18:00.755660Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30319, node 3 2025-09-25T16:18:00.780010Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:00.780025Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:00.780027Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:00.780076Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4724 2025-09-25T16:18:00.837602Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.837635Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:00.838840Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4724 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:00.894179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:00.896082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:01.037886Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.242517Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061844988463501:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.242537Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.242701Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061844988463511:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.242707Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.246316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.265983Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061844988463603:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.266009Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.266175Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061844988463608:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.266185Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061844988463609:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.266207Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.267144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:01.274748Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061844988463612:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-09-25T16:18:01.370389Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061844988463663:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:01.447121Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817081490, txId: 281474976710662] shutting down [[[2];[1000];["Dogecoin"]];[[4];[1];["XTC"]];[[5];[2];["Cardano"]];[[6];[3];["Tether"]]] >> KqpScan::Join3TablesNoRemap [GOOD] >> KqpScan::Join3Tables >> KqpScan::Limit [GOOD] >> KqpScan::LimitOverSecondaryIndexRead >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> test.py::test[blocks-combine_hashed_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] >> test.py::test[window-win_func_over_group_by--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 7180, MsgBus: 17341 2025-09-25T16:17:57.217909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061829301519735:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.220194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:57.224231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fb8/r3tmp/tmpUCgnxj/pdisk_1.dat 2025-09-25T16:17:57.271281Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7180, node 1 2025-09-25T16:17:57.284044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.284056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.284058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.284097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17341 2025-09-25T16:17:57.333220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.333248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.334414Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17341 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:57.358627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.371865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.374686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:17:57.388351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.419032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.441169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:57.453803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.958914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061829301521314:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.958955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.959097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061829301521325:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.959121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.058478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.068045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.076588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.089901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.103414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.117470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.131830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.146631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.176491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061833596489484:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.176538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.176635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061833596489490:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.176647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061833596489489:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.176668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.178039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183 ... fo.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18340, node 3 2025-09-25T16:18:00.738911Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:00.738924Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:00.738926Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:00.738972Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7006 TClient is connected to server localhost:7006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:00.806722Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-09-25T16:18:00.813491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.825341Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:00.865410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.880888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.910783Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.177402Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843820952522:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.177433Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.177738Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843820952532:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.177765Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.191493Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.202523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.214377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.226403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.242407Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.256947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.270314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.285240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.318358Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061843820953393:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.318392Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.318507Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843820953398:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.318522Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843820953399:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.318570Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.319582Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:01.324592Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061843820953402:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:01.383846Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061843820953454:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:01.707289Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:01.826594Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817081784, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 18272, MsgBus: 6129 2025-09-25T16:17:56.592432Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061823236597911:2138];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:56.592466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ff7/r3tmp/tmpEG0yyG/pdisk_1.dat 2025-09-25T16:17:56.651444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:56.651467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:56.652774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:56.662521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:56.667642Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:56.668030Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061823236597811:2081] 1758817076591750 != 1758817076591753 TServer::EnableGrpc on GrpcPort 18272, node 1 2025-09-25T16:17:56.693163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:56.693178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:56.693180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:56.693228Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6129 2025-09-25T16:17:56.884153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient 
is connected to server localhost:6129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:56.934526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:56.945018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:17:56.952843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:17:56.995805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.040185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:17:57.053295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.277346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061827531566752:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.277384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.280101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061827531566762:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.280130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.360209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.379967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.389959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.405302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.418554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.431957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.446141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.468277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.545678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061827531567636:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.545709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.545830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061827531567641:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.545836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061827531567642:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.545857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... will try to initialize from file: (empty maybe) 2025-09-25T16:18:00.617278Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:00.617340Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10805 TClient is connected to server localhost:10805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:00.737344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:00.739413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:00.818951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.842555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:00.871130Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:00.904890Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:00.919042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.169273Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061845516818198:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.169308Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.169442Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061845516818208:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.169455Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.181603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.191096Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.205331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.219658Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.235282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.251936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.263796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.280911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.321805Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061845516819078:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.321832Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.321929Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061845516819084:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.321996Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061845516819083:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.322002Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.322846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:01.329374Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:01.329446Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061845516819087:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:01.401781Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061845516819139:3558] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:01.558159Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:01.961377Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817081819, txId: 281474976710673] shutting down >> KqpScan::MultipleResults [GOOD] >> KqpScan::MiltiExprWithPure >> KqpScan::UnionBasic [GOOD] >> KqpScan::UnionAggregate >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleRebootDownNode >> test.py::test[join-join_without_correlation_and_struct_access-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_and_struct_access-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] >> KqpScan::SelectExistsUnexpected [GOOD] >> KqpScan::TwoAggregatesTwoWindows [GOOD] >> KqpScan::UdfFailure >> KqpScan::JoinSimple [GOOD] >> KqpScan::Join4 >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> KqpSplit::UndeliveryOnFinishedRead [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:142:2058] recipient: [1:117:2146] 2025-09-25T16:17:35.125850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:17:35.125886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:35.125894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: 
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:17:35.125900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:17:35.125906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:17:35.125911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:17:35.125921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:17:35.125936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:17:35.126072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:17:35.126143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:17:35.148578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:17:35.148623Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:35.148740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:198:2058] recipient: [1:15:2062] 2025-09-25T16:17:35.151543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:17:35.151759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:17:35.151800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:17:35.152987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:17:35.153041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:17:35.153166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.153248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:17:35.153679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:35.153735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:17:35.154028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:17:35.154039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:17:35.154079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:17:35.154087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:17:35.154095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:17:35.154116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] Leader for TabletID 72057594037968897 is [1:227:2225] sender: [1:228:2058] recipient: [1:221:2221] 2025-09-25T16:17:35.155652Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:248:2058] recipient: [1:15:2062] 2025-09-25T16:17:35.178516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:17:35.178631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.178703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:17:35.178711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:17:35.178795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:17:35.178811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:35.179657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.179719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:17:35.179778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:17:35.179789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:17:35.179796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:17:35.179801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:17:35.180197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.180208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:17:35.180214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:17:35.180687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.180707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:17:35.180715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:17:35.180724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:17:35.181505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:17:35.181978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:17:35.182029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:263:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:17:35.182280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:17:35.182307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 Tab ... 
CHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:18:01.788247Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1005:0 128 -> 240 2025-09-25T16:18:01.788278Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:18:01.788287Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:18:01.788298Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:18:01.788590Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.788665Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-09-25T16:18:01.788980Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:01.788988Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:01.789027Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:18:01.789049Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:01.789072Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:01.789078Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:212:2213], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-09-25T16:18:01.789083Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:212:2213], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-09-25T16:18:01.789088Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [83:212:2213], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-09-25T16:18:01.789138Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-09-25T16:18:01.789146Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-09-25T16:18:01.789163Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 
2025-09-25T16:18:01.789167Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-09-25T16:18:01.789173Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 2025-09-25T16:18:01.789176Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-09-25T16:18:01.789181Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-09-25T16:18:01.789187Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-09-25T16:18:01.789193Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1005:0 2025-09-25T16:18:01.789197Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1005:0 2025-09-25T16:18:01.789212Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:18:01.789216Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:18:01.789222Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2025-09-25T16:18:01.789226Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-09-25T16:18:01.789230Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:18:01.789234Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-09-25T16:18:01.789325Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789336Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789341Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-09-25T16:18:01.789347Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-09-25T16:18:01.789351Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:18:01.789425Z node 83 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:18:01.789431Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-09-25T16:18:01.789442Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-09-25T16:18:01.789515Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789526Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789530Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-09-25T16:18:01.789534Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-09-25T16:18:01.789540Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:18:01.789631Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789643Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.789647Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-09-25T16:18:01.789651Z node 83 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:18:01.789656Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:18:01.789666Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-09-25T16:18:01.790549Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.790752Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046678944 2025-09-25T16:18:01.790786Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-09-25T16:18:01.790869Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-09-25T16:18:01.790944Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-09-25T16:18:01.790953Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-09-25T16:18:01.791007Z node 83 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-09-25T16:18:01.791023Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-09-25T16:18:01.791026Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:424:2414] TestWaitNotification: OK eventTxId 1005 >> KqpScan::SingleKey [GOOD] >> KqpScan::SqlInParameter >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile |81.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelectExistsUnexpected [GOOD] Test command err: Trying to start YDB, gRPC: 65352, MsgBus: 11173 2025-09-25T16:17:56.630074Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061824667032409:2251];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:56.630099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fee/r3tmp/tmpfhumM4/pdisk_1.dat 2025-09-25T16:17:56.740532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:56.740564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:56.740916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:56.741579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:56.758728Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65352, node 1 
2025-09-25T16:17:56.840846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:56.840859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:56.840861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:56.840904Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:56.933411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11173 TClient is connected to server localhost:11173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:57.012727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.092939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.129725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.177267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:17:57.196652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.636686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:57.694797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061828962001153:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.694841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.700897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061828962001163:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.700965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.743662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.752601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.762319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.776051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.805353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.840409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.856369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.870121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:57.890076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061828962002028:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.890111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.890122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061828962002033:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.890152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061828962002035:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.890160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:57.890918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:18 ... got bad distributable configuration TClient is connected to server localhost:22142 TClient is connected to server localhost:22142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:01.497653Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-09-25T16:18:01.506389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.520277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.544309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:01.559295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.616987Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.841018Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843839084801:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.841054Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.842065Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843839084811:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.842100Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.853932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.868419Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.879325Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.894558Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.905198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.920232Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.933719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.948285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.964489Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061843839085673:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.964514Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.964613Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843839085678:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.964625Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061843839085679:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.964632Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.965889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:01.974961Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061843839085682:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:02.052060Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061848134053030:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:02.365939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.371736Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:02.547592Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817082512, txId: 281474976710675] shutting down 2025-09-25T16:18:02.664724Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817082631, txId: 281474976710678] shutting down |81.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> KqpScan::Join3Tables [GOOD] >> KqpScan::LimitOverSecondaryIndexRead [GOOD] >> KqpScan::Like >> KqpScan::MiltiExprWithPure [GOOD] >> KqpScan::LongStringCombiner >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> KqpSplit::AfterResult+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRange+Ascending >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-bare_yson--Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_assume--ForceBlocks] >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> KqpScan::UnionAggregate [GOOD] >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> KqpScan::Join4 [GOOD] >> KqpScan::JoinLeftOnly >> KqpScan::SqlInParameter [GOOD] >> KqpScan::SqlInLiteral >> TPDiskRaces::OwnerRecreationRaces [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLog >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> test.py::test[join-bush_in_in_in-off-ForceBlocks] >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> KqpScan::EarlyFinish [GOOD] >> KqpScan::Effects >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] >> test.py::test[blocks-combine_hashed_count_filter--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> 
KqpScan::Join3Tables [GOOD] Test command err: Trying to start YDB, gRPC: 27248, MsgBus: 7879 2025-09-25T16:17:57.269057Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061828208835008:2173];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:57.269095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:17:57.270502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fd6/r3tmp/tmpkJuy9X/pdisk_1.dat 2025-09-25T16:17:57.460042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:57.460068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:57.473205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:57.526493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:57.531834Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:57.533235Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061828208834869:2081] 1758817077231777 != 1758817077231780 TServer::EnableGrpc on GrpcPort 27248, node 1 2025-09-25T16:17:57.561028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:57.561041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:57.561043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:57.561081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7879 TClient is connected to server localhost:7879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:57.671277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:57.679763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.696991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:57.721773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.748207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:57.763113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:17:58.260523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061832503803815:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.260553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.261037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061832503803825:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.261064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.262630Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:58.300189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.308239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.320457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.336130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.349943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.362839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.379676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.395291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:58.426145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061832503804696:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.426178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.426285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061832503804701:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:58.426295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061832503804702:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool ... s response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:02.405888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:02.407228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:02.458130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.482341Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.500640Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.508970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.520706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:02.727913Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061849895740132:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.727942Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.731082Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061849895740208:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.731222Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.732143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.740705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.752196Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.766092Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.779864Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.794278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.811405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.822229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.843308Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061849895741003:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.843343Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.843411Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061849895741008:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.843426Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061849895741009:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.843433Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.844459Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:02.849249Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061849895741012:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:02.908286Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061849895741064:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:03.163177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:03.322199Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:03.380556Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817083408, txId: 281474976715675] shutting down 2025-09-25T16:18:03.535491Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817083576, txId: 281474976715677] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionAggregate [GOOD] Test command err: Trying to start YDB, gRPC: 6873, MsgBus: 11492 2025-09-25T16:18:01.179251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061844234849583:2083];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:01.179394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f9e/r3tmp/tmpfQCrS4/pdisk_1.dat 2025-09-25T16:18:01.220584Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 6873, node 1 2025-09-25T16:18:01.235928Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:01.241643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061844234849525:2081] 1758817081175242 != 1758817081175245 2025-09-25T16:18:01.256579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:01.256595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:01.256598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:01.256666Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:01.285865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-09-25T16:18:01.285902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.288340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11492 2025-09-25T16:18:01.415371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:01.509822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:01.513348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:01.520286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:01.566396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.601415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:01.613965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.871846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061844234851172:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.871878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.872011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061844234851182:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.872024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.933559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.942080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.954541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.968945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.982492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.997207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.010838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.028874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.050278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061848529819347:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.050314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.050401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061848529819352:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.050415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061848529819353:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.050470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Da ... : will try to initialize from file: (empty maybe) 2025-09-25T16:18:02.872675Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:02.872727Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3492 2025-09-25T16:18:02.913330Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:02.925431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:02.929310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:02.945707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.976666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:03.006438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:03.025916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.350081Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061853539272848:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.350137Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.353919Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061853539272931:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.353949Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.354788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.367178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.378271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.389346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.403591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.417938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.432653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.445971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.469950Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061853539273719:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.469988Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.470105Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061853539273724:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.470117Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061853539273725:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.470187Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.471179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:03.474847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:03.475495Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061853539273728:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:03.528210Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061853539273780:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:03.842527Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:04.099263Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817083926, txId: 281474976710673] shutting down >> test.py::test[blocks-date_less--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-09-25T16:17:50.586430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:50.617748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:50.620246Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:50.620319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:50.620341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004431/r3tmp/tmpNK0zf4/pdisk_1.dat 2025-09-25T16:17:50.690681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:50.690716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:50.701744Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:50.702425Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817070073606 != 1758817070073610 2025-09-25T16:17:50.736416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:50.788325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:50.823128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:50.922737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:50.938686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:50.938995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:50.939061Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:50.939123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:50.940342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:50.954741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:50.954790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-09-25T16:17:50.954991Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:50.955003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:50.955012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:50.955080Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:50.955114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:50.955130Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:50.966242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:50.972328Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:50.972417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:50.972443Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:50.972449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:50.972454Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:50.972460Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:50.972529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:673:2564], Recipient [1:673:2564]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.972537Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.972637Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:50.972662Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:50.972671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:50.972682Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:50.972691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-09-25T16:17:50.972697Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:17:50.972702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:17:50.972708Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:50.972713Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-09-25T16:17:50.972817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [1:675:2565], Recipient [1:673:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:50.973148Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:50.973160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:675:2565], sessionId# [0:0:0] 2025-09-25T16:17:50.973180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269549568, Sender [1:409:2405], Recipient [1:675:2565] 2025-09-25T16:17:50.973185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3150: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-09-25T16:17:50.973209Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:50.973270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-09-25T16:17:50.973281Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:50.973301Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:50.973310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-09-25T16:17:50.973315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-09-25T16:17:50.973321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-09-25T16:17:50.973326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-09-25T16:17:50.973465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-09-25T16:17:50.973472Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-09-25T16:17:50.973476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-09-25T16:17:50.973482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-09-25T16:17:50.973494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-09-25T16:17:50.973501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-09-25T16:17:50.973508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-09-25T16:17:50.973512Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-09-25T16:17:50.973517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-09-25T16:17:50.973846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269746185, Sender [1:691:2575], Recipient [1:673:2564]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-09-25T16:17:50.973858Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:50.984268Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:50.984299Z node 1 :TX_DATASHAR ... Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-09-25T16:18:03.819921Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-09-25T16:18:03.819931Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.819935Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-09-25T16:18:03.819939Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-09-25T16:18:03.819944Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-09-25T16:18:03.820062Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-09-25T16:18:03.820089Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:252: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-09-25T16:18:03.820096Z node 9 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-09-25T16:18:03.820124Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:683: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-09-25T16:18:03.820128Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.820131Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-09-25T16:18:03.820134Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:03.820137Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-09-25T16:18:03.820154Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 
2025-09-25T16:18:03.820159Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-09-25T16:18:03.820162Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [3500:1234567890011] at 72075186224037888 2025-09-25T16:18:03.820166Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.820169Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:03.820172Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-09-25T16:18:03.820176Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-09-25T16:18:03.820181Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.820183Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-09-25T16:18:03.820186Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadInRS 2025-09-25T16:18:03.820190Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadInRS 2025-09-25T16:18:03.820193Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.820196Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadInRS 2025-09-25T16:18:03.820198Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit BlockFailPoint 2025-09-25T16:18:03.820203Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BlockFailPoint 2025-09-25T16:18:03.820209Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.820212Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BlockFailPoint 2025-09-25T16:18:03.820215Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-09-25T16:18:03.820219Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-09-25T16:18:03.820223Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-09-25T16:18:03.820355Z node 9 :TX_DATASHARD TRACE: execute_write_unit.cpp:122: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-09-25T16:18:03.820373Z node 9 :TX_DATASHARD DEBUG: datashard_write_operation.cpp:503: tx 1234567890011 at 72075186224037888 released its data 2025-09-25T16:18:03.820379Z node 9 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-09-25T16:18:03.820382Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:18:03.820385Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-09-25T16:18:03.820388Z node 9 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:18:03.820390Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:18:03.822046Z node 9 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:03.822067Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-09-25T16:18:03.822075Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-09-25T16:18:03.822166Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-09-25T16:18:03.822187Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:252: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-09-25T16:18:03.822195Z node 9 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-09-25T16:18:03.822211Z node 9 :TX_DATASHARD DEBUG: datashard_write_operation.cpp:596: tx 1234567890011 at 72075186224037888 restored its data 2025-09-25T16:18:03.822274Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-09-25T16:18:03.822287Z node 9 :TX_DATASHARD TRACE: locks.cpp:194: Lock 1234567890001 marked broken at v{min} 2025-09-25T16:18:03.822339Z node 9 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-09-25T16:18:03.822354Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-09-25T16:18:03.822361Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-09-25T16:18:03.822371Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-09-25T16:18:03.822380Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-09-25T16:18:03.822461Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-09-25T16:18:03.822467Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-09-25T16:18:03.822472Z node 9 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-09-25T16:18:03.822477Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-09-25T16:18:03.822484Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-09-25T16:18:03.822488Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-09-25T16:18:03.822494Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-09-25T16:18:03.822499Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:03.822503Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-09-25T16:18:03.822508Z node 9 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:18:03.822513Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:18:03.822737Z node 9 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-09-25T16:18:03.822834Z node 9 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:03.822841Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-09-25T16:18:03.822855Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:836: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [9:798:2645] 2025-09-25T16:18:03.822868Z node 9 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpScan::UdfFailure [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] >> KqpScan::Like [GOOD] >> KqpScan::LongStringCombiner [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] Test command err: 2025-09-25T16:18:02.225381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:02.290072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:02.292012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:02.292066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:02.292084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f9d/r3tmp/tmp8t6c1h/pdisk_1.dat 2025-09-25T16:18:02.362781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:02.362813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:02.374400Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:02.375097Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817081784566 != 1758817081784570 2025-09-25T16:18:02.406634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:02.457889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:02.501872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.610809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:650:2545], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.610858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.610973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:668:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.610984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.613730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.812489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:753:2615], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.812526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.812572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.812594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:759:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.812638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.813730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:02.856274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:02.957453Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:02.999310Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:832:2663] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:03.026932Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tnqdweew9qgpc4xgm9ydc, Database: , SessionId: ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:03.027019Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:156;event=channel_info;ch_size=8388608;ch_count=1;ch_limit=8388608;inputs=0;input_channels_count=0; 2025-09-25T16:18:03.027134Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:134: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Start compute actor [1:859:2613], task: 1 2025-09-25T16:18:03.027145Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:141: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Set execution timeout 299.418339s 2025-09-25T16:18:03.028254Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1450: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\032\n\n/Root/Test\020\200\202\224\204\200\200\200\200\001\030\002(\001\"\t\n\003Key\020\001 \004*\t\n\003Key\020\001 \004*\014\n\005Value\020\002 \201 0\214\247\200\200\200\200@8\001@\000H\000R\022\tW\003\000\000\000\000\000\000\0215\n\000\000\001\000\000\000X\000`\000h\000h\001x\000" } } } 2025-09-25T16:18:03.028354Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-09-25T16:18:03.029146Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1072: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . 
Database : . DatabaseId : /Root. }. Received channels info: 2025-09-25T16:18:03.029242Z node 1 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:370: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-09-25T16:18:03.029286Z node 1 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3931: TxId: 281474976715660, task: 1. Add data: 72 / 72 2025-09-25T16:18:03.029300Z node 1 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3900: TxId: 281474976715660, task: 1. Send data=72, closed=1, bufferActorId=[1:855:2613] 2025-09-25T16:18:03.029310Z node 1 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:384: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTU0N2EwM2QtOGJjMmZiYy0zNDExMjg4Zi01NTRiZjZjYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 72 2025-09-25T16:18:03.029322Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715660, task: 1. Tasks execution finished 2025-09-25T16:18:03.029328Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1586: SelfId: [1:859:2613], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01k60tnqdweew9qgpc4xgm9ydc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://sess ... boperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.506084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:03.535936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:03.548547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:03.619360Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:03.885567Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061855163397098:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.885617Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.889147Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061855163397180:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.889284Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.889486Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061855163397184:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.889492Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:03.890242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.914456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.929485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.942294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.962208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:03.983924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.009808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.030244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.079304Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061859458365270:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.079339Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.079543Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061859458365275:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.079551Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061859458365276:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.079557Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.080533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:04.084553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:04.084618Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061859458365279:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:04.176280Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061859458365331:3559] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:04.382438Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:04.457364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.550490Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tns1mdkf211zt7mt9mxqh, Database: , SessionId: ydb://session/3?node_id=2&id=YjhjNjVlYzItZGZjODg0ZDMtNDYzZTE2NjMtNDQ3ZjVlMjU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:04.552052Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710675. Ctx: { TraceId: 01k60tns1mdkf211zt7mt9mxqh, Database: , SessionId: ydb://session/3?node_id=2&id=YjhjNjVlYzItZGZjODg0ZDMtNDYzZTE2NjMtNDQ3ZjVlMjU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:04.626434Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710677. Ctx: { TraceId: 01k60tns4bc3v14mt3d0msr3pq, Database: , SessionId: ydb://session/3?node_id=2&id=ZTkyMDJlZDctMTEzMDNlMzMtNDM1MzFkZGUtZjBmMzFlNDY=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710678 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-09-25T16:18:04.641471Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817084668, txId: 281474976710676] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Like [GOOD] Test command err: Trying to start YDB, gRPC: 65374, MsgBus: 27209 2025-09-25T16:18:00.989315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061838699518211:2268];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:00.991914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fab/r3tmp/tmpBVtr6S/pdisk_1.dat 2025-09-25T16:18:01.052912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:01.055160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.055185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.062204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:01.066445Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:01.072338Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061838699517952:2081] 1758817080946843 != 1758817080946846 TServer::EnableGrpc on GrpcPort 65374, node 1 2025-09-25T16:18:01.097059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:01.097077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:01.097080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:01.097129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27209 TClient is connected to server localhost:27209 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:01.183648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:01.190348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:01.211105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.238768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.288566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.310148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.342863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:01.560046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061842994486896:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.560129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.560550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061842994486906:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.560565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.561037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061842994486908:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.561066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.617381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.627114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.639049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.652591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.667009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.682743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.695435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.709131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.738304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061842994487772:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.738337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.738410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061842994487777:2478], DatabaseId: /Root, PoolId: default, ... cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:04.313063Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:04.313065Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:04.313115Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6550 TClient is connected to server localhost:6550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:04.375006Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:04.384896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.403637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.433992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:04.450211Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.540454Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:04.774038Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859670139617:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.774110Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.777240Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859670139691:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.777286Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.777768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.787779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.796361Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.810425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.823652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.839801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.853456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.866018Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.882837Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061859670140480:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.882870Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.882910Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859670140485:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.882928Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859670140486:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.882940Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.883745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:04.888904Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061859670140489:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:04.973062Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061859670140541:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:05.231753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.280926Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UdfFailure [GOOD] Test command err: Trying to start YDB, gRPC: 11481, MsgBus: 20894 2025-09-25T16:18:01.722993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061843675654571:2068];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:01.723033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fa4/r3tmp/tmpoZHpv3/pdisk_1.dat 2025-09-25T16:18:01.776131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:01.787637Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:01.787856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061843675654542:2081] 1758817081722489 != 1758817081722492 TServer::EnableGrpc on GrpcPort 11481, node 1 2025-09-25T16:18:01.797830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:01.797843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:01.797845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:01.797882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20894 2025-09-25T16:18:01.826157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.826186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.827287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20894 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:01.860273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:01.863357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:01.869100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:01.890517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.921466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:01.934221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.065285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.196091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847970623484:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.196134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.198217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847970623494:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.198249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.259384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.277505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.289875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.304258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.321110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.335378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.347573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.365482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.386248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061847970624366:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.386269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.386345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847970624371:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.386353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847970624372:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.386368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... : EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:04.414685Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:04.416304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:04.517943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.532563Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.563771Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.576230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.621876Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:04.745929Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859326433428:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.745976Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.746143Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859326433451:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.746158Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.750689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.759126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.768050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.785994Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.809739Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.822489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.834970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.845658Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.862119Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061859326434301:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.862151Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859326434306:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.862152Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.862195Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061859326434308:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.862201Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.862813Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:04.872102Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061859326434309:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:04.949531Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061859326434362:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:05.219719Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [3:7554061863621401984:2526], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01k60tnsr857g3vftpfvz9q10f. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MTY3OWEwZDUtZTU1NjJkYzItZGZkYWZiYWYtZWE2ZWMzZDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-09-25T16:18:05.219850Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [3:7554061863621401986:2527], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01k60tnsr857g3vftpfvz9q10f. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=MTY3OWEwZDUtZTU1NjJkYzItZGZkYWZiYWYtZWE2ZWMzZDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-09-25T16:18:05.219851Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=3&id=MTY3OWEwZDUtZTU1NjJkYzItZGZkYWZiYWYtZWE2ZWMzZDA=, ActorId: [3:7554061863621401955:2519], ActorState: ExecuteState, TraceId: 01k60tnsr857g3vftpfvz9q10f, Create QueryResponse for error on request, msg: 2025-09-25T16:18:05.219981Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085263, txId: 281474976710673] shutting down 2025-09-25T16:18:05.312225Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> KqpScan::JoinLeftOnly [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] >> KqpScan::SqlInLiteral [GOOD] >> KqpScan::Effects [GOOD] >> KqpScan::EmptySet_1 >> THiveTest::TestDrain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: 2025-09-25T16:17:50.838322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:50.905719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:50.909198Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:50.909295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:50.909322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004419/r3tmp/tmpKDjymE/pdisk_1.dat 2025-09-25T16:17:50.985402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:50.985444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:50.998128Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:50.998882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817070308230 != 1758817070308234 2025-09-25T16:17:51.030095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:51.080121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:51.124287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:51.204946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:51.223752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:51.224047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:51.224122Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:51.224193Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:51.236020Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:51.236329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:51.236373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-09-25T16:17:51.236587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:51.236597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:51.236606Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:51.236673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:51.236697Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:51.236712Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:51.248951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:51.255726Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:51.255850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:51.255887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:51.255893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:51.255900Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:51.255907Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.256004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:673:2564], Recipient [1:673:2564]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.256013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.256138Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:51.256169Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:51.256183Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.256192Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:51.256201Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-09-25T16:17:51.256208Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:17:51.256217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:17:51.256223Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:51.256229Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-09-25T16:17:51.256242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [1:674:2565], Recipient [1:673:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.256248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.256255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:51.256276Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269549568, Sender [1:409:2405], Recipient [1:674:2565] 2025-09-25T16:17:51.256281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3150: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-09-25T16:17:51.256306Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:51.256373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-09-25T16:17:51.256399Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:51.256440Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:51.256450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-09-25T16:17:51.256455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-09-25T16:17:51.256462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-09-25T16:17:51.256467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-09-25T16:17:51.256530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-09-25T16:17:51.256535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-09-25T16:17:51.256540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-09-25T16:17:51.256544Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-09-25T16:17:51.256556Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-09-25T16:17:51.256561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-09-25T16:17:51.256566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-09-25T16:17:51.256570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-09-25T16:17:51.256578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-09-25T16:17:51.257006Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269746185, Sender [1:691:2575], Recipient [1:673:2564]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-09-25T16:17:51.257018Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.267390Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:51.267426Z node 1 :TX_DATASHAR ... ode 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269287938, Sender [9:708:2583], Recipient [9:954:2759]: {TEvReadSet step# 2001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-09-25T16:18:05.541246Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3164: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-09-25T16:18:05.541250Z node 9 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2025-09-25T16:18:05.568514Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [9:1006:2798], Recipient [9:954:2759]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.568537Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.568544Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [9:1005:2797], serverId# [9:1006:2798], sessionId# [0:0:0] 2025-09-25T16:18:05.578936Z node 9 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tnt356bsh0ypn6ssztkw5, Database: , SessionId: ydb://session/3?node_id=9&id=ODZmMDVjMjYtYjExMzI1OGItOTRhMTJiNWQtYjI3ZmE0MWY=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:05.579742Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [9:1012:2801], Recipient [9:954:2759]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-09-25T16:18:05.579789Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-09-25T16:18:05.579808Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-09-25T16:18:05.579828Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-09-25T16:18:05.579834Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-09-25T16:18:05.579841Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:05.579846Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-09-25T16:18:05.579861Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-09-25T16:18:05.579867Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-09-25T16:18:05.579870Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:05.579875Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-09-25T16:18:05.579880Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-09-25T16:18:05.579901Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-09-25T16:18:05.579963Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2001/18446744073709551615 2025-09-25T16:18:05.579971Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[9:1012:2801], 0} after executionsCount# 1 2025-09-25T16:18:05.579979Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[9:1012:2801], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:05.579996Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[9:1012:2801], 0} finished in read 2025-09-25T16:18:05.580008Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-09-25T16:18:05.580012Z node 9 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-09-25T16:18:05.580017Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-09-25T16:18:05.580021Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-09-25T16:18:05.580033Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-09-25T16:18:05.580038Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-09-25T16:18:05.580042Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037888 has finished 2025-09-25T16:18:05.580048Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-09-25T16:18:05.580063Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-09-25T16:18:05.580317Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [9:1012:2801], Recipient [9:954:2759]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-09-25T16:18:05.580329Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-09-25T16:18:05.580389Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [9:1012:2801], Recipient [9:708:2583]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-09-25T16:18:05.580414Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-09-25T16:18:05.580424Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-09-25T16:18:05.580433Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:05.580438Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-09-25T16:18:05.580442Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:05.580446Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-09-25T16:18:05.580454Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-09-25T16:18:05.580460Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:05.580463Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:05.580468Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to 
execution unit ExecuteRead 2025-09-25T16:18:05.580474Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-09-25T16:18:05.580489Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-09-25T16:18:05.580514Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2001/18446744073709551615 2025-09-25T16:18:05.580519Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[9:1012:2801], 1} after executionsCount# 1 2025-09-25T16:18:05.580525Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[9:1012:2801], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:05.580534Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[9:1012:2801], 1} finished in read 2025-09-25T16:18:05.580541Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:05.580544Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-09-25T16:18:05.580548Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-09-25T16:18:05.580552Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-09-25T16:18:05.580559Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:05.580563Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-09-25T16:18:05.580567Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037889 has finished 2025-09-25T16:18:05.580571Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-09-25T16:18:05.580583Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-09-25T16:18:05.580778Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [9:1012:2801], Recipient [9:708:2583]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-09-25T16:18:05.580788Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } >> THiveTest::TestNoMigrationToSelf ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> 
KqpScan::LongStringCombiner [GOOD] Test command err: Trying to start YDB, gRPC: 5391, MsgBus: 8276 2025-09-25T16:18:01.449220Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061843009649128:2190];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:01.449271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:01.456978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fa5/r3tmp/tmpUGAcWj/pdisk_1.dat 2025-09-25T16:18:01.484311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.484348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.486554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:01.495843Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:01.496035Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061843009648975:2081] 1758817081445458 != 1758817081445461 TServer::EnableGrpc on GrpcPort 5391, node 1 2025-09-25T16:18:01.541188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:01.541201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:01.541203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:01.541250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:01.577229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8276 TClient is connected to server localhost:8276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:01.630125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:01.639284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.665108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.683545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.696720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.939082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061843009650621:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.939113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.939364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061843009650631:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.939378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.994265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.002972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.019756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.032409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.044578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.059220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.074329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.088679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.120360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061847304618789:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.120418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.121403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847304618794:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.121425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847304618795:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.121494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.122727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operat ... 228: got bad distributable configuration TClient is connected to server localhost:29007 TClient is connected to server localhost:29007 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:04.340758Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:04.343085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:04.364370Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:04.364425Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:04.365506Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:04.394375Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.416385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:04.447278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:04.465306Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.541163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:04.744525Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061858420522385:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.744558Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.744762Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061858420522405:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.744779Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.749814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.759903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.768039Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.782120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.795336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.809785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.825746Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.838367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.856444Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061858420523257:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.856484Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061858420523262:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.856498Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.856582Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061858420523265:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.856600Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.857271Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:04.866075Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061858420523264:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:04.951538Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061858420523318:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:05.263693Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:05.533466Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085319, txId: 281474976715673] shutting down >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 6009, MsgBus: 27879 2025-09-25T16:18:02.260876Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061849459011669:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:02.263600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f8f/r3tmp/tmpFmdeNS/pdisk_1.dat 2025-09-25T16:18:02.318138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 6009, node 1 2025-09-25T16:18:02.341576Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:02.343925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:02.343931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:02.343933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:02.343984Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27879 2025-09-25T16:18:02.367841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:02.367876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:02.369190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27879 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:02.416014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:02.419656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:02.428591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.460235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.484016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:02.494080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.507872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:02.692907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061849459013261:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.692955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.693062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061849459013271:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.693072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.755897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.764708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.772749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.787719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.800983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.814908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.829407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.843685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.860526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061849459014133:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.860562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.860600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061849459014138:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.860615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061849459014139:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.860631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.861510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemesha ... ient is connected to server localhost:26324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:04.357553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:04.410568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.430116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.447031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:04.454474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.467091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:04.726664Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061858026390624:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.726689Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.726785Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061858026390634:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.726796Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.738664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.748062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.760336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.774474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.789136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.803237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.816330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.831182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:04.850968Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061858026391495:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.851001Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.851044Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061858026391500:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.851051Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061858026391501:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.851071Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:04.851953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:04.858451Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061858026391504:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:04.941162Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061858026391556:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:05.280583Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:05.289237Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tnsrb2nqt6h6zbfkw5t4y, Database: , SessionId: ydb://session/3?node_id=2&id=MzZhNGJjMDctYWU1ZmQ3ZWYtMWUwODkwYTMtNTM2Yzc5YjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:05.737184Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085333, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::JoinLeftOnly [GOOD] Test command err: Trying to start YDB, gRPC: 25909, MsgBus: 18338 2025-09-25T16:18:01.767676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061842785226247:2081];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:01.768964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f92/r3tmp/tmpidMUlb/pdisk_1.dat 2025-09-25T16:18:01.812898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:01.827506Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25909, node 1 2025-09-25T16:18:01.842367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:01.842380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:01.842383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:01.842429Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18338 2025-09-25T16:18:01.871324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.871369Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.873294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:01.921185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:01.924482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:01.929150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.954900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:01.986203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:01.998870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.024511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.225180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847080195154:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.225219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.225546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847080195164:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.225563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.278671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.287012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.296781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.313899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.325747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.341565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.356700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.371833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.394215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061847080196029:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.394253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.394352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847080196034:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.394359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847080196035:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.394378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.395449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... got bad distributable configuration TClient is connected to server localhost:12230 TClient is connected to server localhost:12230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:04.777442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:04.788894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.814062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.840167Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:04.854920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:04.919128Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:05.152812Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061862409775916:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.152898Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.156965Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061862409775999:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.156988Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.157755Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.170027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.180505Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.194976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.208791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.223932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.237950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.251543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.267903Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061862409776787:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.267938Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.267984Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061862409776792:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.267995Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061862409776793:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.268004Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.268933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:05.273355Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061862409776796:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:05.352773Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061862409776848:3550] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } waiting... 2025-09-25T16:18:05.624343Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.699410Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:05.731113Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085774, txId: 281474976710675] shutting down 2025-09-25T16:18:05.787813Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085823, txId: 281474976710677] shutting down >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> test.py::test[window-win_func_over_group_by--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SqlInLiteral [GOOD] Test command err: Trying to start YDB, gRPC: 11897, MsgBus: 19895 2025-09-25T16:18:02.033449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061847279572698:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:02.034252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f90/r3tmp/tmp0x3Uk8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11897, node 1 2025-09-25T16:18:02.130756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:02.131096Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:02.136019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:02.136056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:02.137180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:02.147294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-09-25T16:18:02.147318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:02.147319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:02.147371Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19895 TClient is connected to server localhost:19895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:02.256519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:02.261569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.290028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:02.329988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:02.357713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.358052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.568466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847279574277:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.568553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.568653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847279574287:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.568662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.578455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.586773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.598182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.613193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.626214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.641100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.653743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.668301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.685057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061847279575149:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.685082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.685103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847279575154:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.685111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061847279575156:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.685132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.685995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... RN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:04.899200Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:04.899203Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:04.899253Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63528 TClient is connected to server localhost:63528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:04.953007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:04.972520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:05.038441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
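The repeated KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found"), followed by an ESchemeOpCreateResourcePool suboperation and, elsewhere in this log, by a "Scheduled retry for error: ... doublechecking" message and a TX_PROXY "path exist, request accepts it" error, appear to describe a benign bootstrap race: the pool is looked up, created if missing, and a concurrent "already exists" outcome is treated as success. Below is a minimal C++ sketch of that idempotent create-then-recheck loop under assumed names (EnsureDefaultPool, EStatus and both callbacks are hypothetical; this is an illustration, not the TPoolCreatorActor code).

// Illustrative sketch only: a generic "ensure the default resource pool exists" loop.
// All identifiers here are hypothetical and do not come from the YDB sources.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

enum class EStatus { Success, NotFound, AlreadyExists, Retryable };

// fetch stands in for the pool lookup, create for the create-resource-pool request.
bool EnsureDefaultPool(const std::function<EStatus()>& fetch,
                       const std::function<EStatus()>& create,
                       int maxRetries = 5)
{
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        switch (fetch()) {
        case EStatus::Success:
            return true;                      // pool is visible, nothing to do
        case EStatus::NotFound:
            break;                            // fall through to creation below
        default:
            std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
            continue;                         // transient error: back off and retry
        }
        EStatus created = create();
        if (created == EStatus::Success || created == EStatus::AlreadyExists) {
            // "path exist, request accepts it" maps to AlreadyExists here:
            // a concurrent creator won the race, which is fine.
            continue;                         // re-fetch to "doublecheck" visibility
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
    }
    return false;
}

int main() {
    int fetchCalls = 0;
    auto fetch = [&]() { return ++fetchCalls < 2 ? EStatus::NotFound : EStatus::Success; };
    auto create = [&]() { return EStatus::AlreadyExists; };  // another actor created it first
    std::cout << (EnsureDefaultPool(fetch, create) ? "pool ready\n" : "gave up\n");
}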
2025-09-25T16:18:05.066173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.078527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:05.145361Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:05.306920Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061860387309828:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.306951Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.310490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061860387309910:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.310525Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.311615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.322348Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.335166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.349282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.363142Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.377906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.393499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.410129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:05.437453Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061860387310701:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.437493Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.437540Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061860387310706:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.437553Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061860387310707:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.437579Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:05.438550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:05.441784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:05.441836Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061860387310710:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:05.521935Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061860387310762:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:05.848284Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817085893, txId: 281474976710674] shutting down 2025-09-25T16:18:05.874778Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-09-25T16:17:50.710197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:50.751443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:50.754544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:50.754665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:50.754698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004426/r3tmp/tmpKPiXGl/pdisk_1.dat 2025-09-25T16:17:50.833783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:50.833830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:50.851976Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:50.853398Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817070265483 != 1758817070265487 2025-09-25T16:17:50.889560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:50.946293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:50.996492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:51.072208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:51.089244Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:51.089539Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:51.089617Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:51.089691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:51.100511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:51.100747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:51.100789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-09-25T16:17:51.101047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:51.101059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:51.101067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:51.101146Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:51.101178Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:51.101195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:51.111598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:51.117270Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:51.117397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:51.117430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:51.117437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:51.117443Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:51.117450Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.117551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:673:2564], Recipient [1:673:2564]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.117560Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.117699Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:51.117732Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:51.117747Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.117756Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:51.117764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-09-25T16:17:51.117772Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:17:51.117777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:17:51.117783Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:51.117788Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-09-25T16:17:51.117801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [1:674:2565], Recipient [1:673:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.117807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.117814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:51.117834Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269549568, Sender [1:409:2405], Recipient [1:674:2565] 2025-09-25T16:17:51.117839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3150: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-09-25T16:17:51.117867Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:51.117931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-09-25T16:17:51.117944Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:51.117980Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:51.117993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-09-25T16:17:51.117998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-09-25T16:17:51.118004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-09-25T16:17:51.118008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-09-25T16:17:51.118072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-09-25T16:17:51.118077Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-09-25T16:17:51.118082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-09-25T16:17:51.118085Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-09-25T16:17:51.118099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-09-25T16:17:51.118103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-09-25T16:17:51.118107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-09-25T16:17:51.118111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-09-25T16:17:51.118117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-09-25T16:17:51.118545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269746185, Sender [1:691:2575], Recipient [1:673:2564]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-09-25T16:17:51.118574Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.129013Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:51.129062Z node 1 :TX_DATASHAR ... : NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999182Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999187Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037890, clientId# [9:968:2776], serverId# [9:969:2777], sessionId# [0:0:0] 2025-09-25T16:18:05.999202Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553169, Sender [9:967:2775], Recipient [9:729:2596]: NKikimrTxDataShard.TEvGetInfoRequest 2025-09-25T16:18:05.999295Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [9:972:2780], Recipient [9:729:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999300Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999305Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037890, clientId# [9:971:2779], serverId# [9:972:2780], sessionId# [0:0:0] 2025-09-25T16:18:05.999327Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [9:970:2778], Recipient [9:729:2596]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-09-25T16:18:05.999344Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-09-25T16:18:05.999349Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2378: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-09-25T16:18:05.999354Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-09-25T16:18:05.999359Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-09-25T16:18:05.999369Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-09-25T16:18:05.999372Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-09-25T16:18:05.999376Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:05.999379Z node 9 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-09-25T16:18:05.999386Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2025-09-25T16:18:05.999391Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-09-25T16:18:05.999394Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:05.999398Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-09-25T16:18:05.999402Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-09-25T16:18:05.999411Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-09-25T16:18:05.999432Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037890 Complete read# {[9:970:2778], 1002} after executionsCount# 1 2025-09-25T16:18:05.999438Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037890 read iterator# {[9:970:2778], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:05.999445Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037890 read iterator# {[9:970:2778], 1002} finished in read 2025-09-25T16:18:05.999451Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-09-25T16:18:05.999455Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-09-25T16:18:05.999458Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-09-25T16:18:05.999461Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-09-25T16:18:05.999467Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-09-25T16:18:05.999471Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-09-25T16:18:05.999474Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037890 has finished 2025-09-25T16:18:05.999478Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-09-25T16:18:05.999488Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-09-25T16:18:05.999582Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [9:975:2783], Recipient [9:724:2592]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999587Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 
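The TX_DATASHARD TRACE lines above step an operation through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan on the propose path; CheckRead, BuildAndWaitDependencies, ExecuteRead, CompletedOperations on the read path), with each unit returning a status that either advances the plan or parks the operation until an external event. The following is a compressed, self-contained sketch of that unit-pipeline pattern; the types and unit bodies are assumptions for illustration and are not the implementation in datashard_pipeline.cpp.

// Simplified illustration of an execution-unit pipeline, loosely modeled on the
// "Trying to execute ... / Execution status ... / Advance execution plan" trace above.
// Types and unit bodies are assumed; they are not YDB's real classes.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EExecutionStatus { Executed, NotReady, DelayComplete };

struct TOperation {
    std::string Id;
    size_t CurrentUnit = 0;   // index into the pipeline below
};

struct TExecutionUnit {
    std::string Name;
    std::function<EExecutionStatus(TOperation&)> Run;
};

// Advance one operation as far as its units allow.
void RunPipeline(TOperation& op, const std::vector<TExecutionUnit>& units) {
    while (op.CurrentUnit < units.size()) {
        const TExecutionUnit& unit = units[op.CurrentUnit];
        std::cout << "Trying to execute [" << op.Id << "] on unit " << unit.Name << "\n";
        EExecutionStatus status = unit.Run(op);
        if (status == EExecutionStatus::NotReady) {
            std::cout << "  not ready, parked (e.g. WaitForPlan)\n";
            return;                            // resumed later by an external event
        }
        std::cout << "  status Executed, advancing plan\n";
        ++op.CurrentUnit;                      // move to the next unit in the chain
    }
    std::cout << "Execution plan for [" << op.Id << "] has finished\n";
}

int main() {
    std::vector<TExecutionUnit> readPipeline = {
        {"CheckRead",                [](TOperation&) { return EExecutionStatus::Executed; }},
        {"BuildAndWaitDependencies", [](TOperation&) { return EExecutionStatus::Executed; }},
        {"ExecuteRead",              [](TOperation&) { return EExecutionStatus::Executed; }},
        {"CompletedOperations",      [](TOperation&) { return EExecutionStatus::Executed; }},
    };
    TOperation op{"0:3"};
    RunPipeline(op, readPipeline);
}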
2025-09-25T16:18:05.999592Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037891, clientId# [9:974:2782], serverId# [9:975:2783], sessionId# [0:0:0] 2025-09-25T16:18:05.999609Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553169, Sender [9:973:2781], Recipient [9:724:2592]: NKikimrTxDataShard.TEvGetInfoRequest 2025-09-25T16:18:05.999705Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [9:978:2786], Recipient [9:724:2592]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999709Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:05.999714Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037891, clientId# [9:977:2785], serverId# [9:978:2786], sessionId# [0:0:0] 2025-09-25T16:18:05.999734Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [9:976:2784], Recipient [9:724:2592]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-09-25T16:18:05.999749Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-09-25T16:18:05.999756Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2378: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-09-25T16:18:05.999760Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-09-25T16:18:05.999765Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-09-25T16:18:05.999774Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-09-25T16:18:05.999777Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-09-25T16:18:05.999781Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:05.999785Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2025-09-25T16:18:05.999791Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2025-09-25T16:18:05.999795Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-09-25T16:18:05.999799Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:05.999802Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-09-25T16:18:05.999806Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-09-25T16:18:05.999815Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037891 Execute read# 1, request: { ReadId: 1003 
TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-09-25T16:18:05.999830Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037891 Complete read# {[9:976:2784], 1003} after executionsCount# 1 2025-09-25T16:18:05.999835Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037891 read iterator# {[9:976:2784], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:05.999842Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037891 read iterator# {[9:976:2784], 1003} finished in read 2025-09-25T16:18:05.999847Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-09-25T16:18:05.999850Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-09-25T16:18:05.999854Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-09-25T16:18:05.999858Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-09-25T16:18:05.999863Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-09-25T16:18:05.999867Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-09-25T16:18:05.999870Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037891 has finished 2025-09-25T16:18:05.999876Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-09-25T16:18:05.999885Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> KqpSplit::StreamLookupSplitAfterFirstResult >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> KqpScan::TwoAggregatesOneFullFrameWindow >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> KqpScan::TaggedScalar >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> KqpScan::SelfJoin3xSameLabels >> KqpScan::EmptySet_1 [GOOD] >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> TCacheTest::MigrationCommon >> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: 2025-09-25T16:17:49.857499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: 
ActorUnknown 2025-09-25T16:17:49.884666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:49.886954Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:49.887058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:49.887085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004449/r3tmp/tmpXHt2In/pdisk_1.dat 2025-09-25T16:17:49.965892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:49.965941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:49.975418Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:49.976373Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817069413029 != 1758817069413033 2025-09-25T16:17:50.011870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:50.074438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:50.113555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:50.202276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:50.219142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:668:2562], Recipient [1:684:2572]: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:50.219435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:668:2562], Recipient [1:684:2572]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:50.219502Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:17:50.219564Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:50.230096Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:668:2562], Recipient [1:684:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:50.230290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:670:2564], Recipient [1:688:2575]: NKikimr::TEvTablet::TEvBoot 
2025-09-25T16:17:50.230665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:50.230717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:50.230919Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:50.230930Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:50.230938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:50.231012Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:50.231030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:670:2564], Recipient [1:688:2575]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:50.231140Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2575] 2025-09-25T16:17:50.231184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:50.232687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:50.232712Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:711:2572] in generation 1 2025-09-25T16:17:50.232754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:670:2564], Recipient [1:688:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:50.232955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:50.232978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:17:50.233133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:17:50.233142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:17:50.233150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:17:50.233198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:50.233216Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:50.233226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2575] in generation 1 2025-09-25T16:17:50.245092Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:50.249392Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:50.249508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:50.249543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:17:50.249549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:50.249554Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-09-25T16:17:50.249562Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:50.249679Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:684:2572], Recipient [1:684:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.249688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.249714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:50.249724Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:17:50.249735Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:50.249743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:17:50.249746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:17:50.249750Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:17:50.249753Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:17:50.249847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:688:2575], Recipient [1:688:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.249852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:50.249868Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:50.249896Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:50.249939Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:50.249948Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:50.249956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-09-25T16:17:50.249961Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:17:50.249966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:17:50.249971Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:50.249976Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:17:50.249983Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:17:50.249993Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 
2025-09-25T16:17:50.250118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [1:685:2573], Recipient [1:684:2572]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:50.250125Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:50.250133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:685:2573], sessionId# [0:0:0] 2025-09-25T16:17:50.250140Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:17:50.250144Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 acti ... e 10 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2025-09-25T16:18:06.447601Z node 10 :TX_DATASHARD ERROR: datashard.cpp:761: Complete volatile write [2000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-09-25T16:18:06.447614Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:06.447631Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269287938, Sender [10:709:2583], Recipient [10:706:2581]: {TEvReadSet step# 2000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-09-25T16:18:06.447636Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3164: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-09-25T16:18:06.447640Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-09-25T16:18:06.484565Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tntzg227fcdtr9ndmy4ph, Database: , SessionId: ydb://session/3?node_id=10&id=ODRlMzllZGEtZTI3Mzk5ZTktZmU5MTdkNTgtMzhiY2NhZDA=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:06.485527Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [10:968:2769], Recipient [10:706:2581]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-09-25T16:18:06.485592Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-09-25T16:18:06.485616Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-09-25T16:18:06.485642Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-09-25T16:18:06.485649Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-09-25T16:18:06.485656Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:06.485661Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-09-25T16:18:06.485681Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-09-25T16:18:06.485688Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-09-25T16:18:06.485692Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:06.485701Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-09-25T16:18:06.485705Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-09-25T16:18:06.485725Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-09-25T16:18:06.485799Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-09-25T16:18:06.485809Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[10:968:2769], 0} after executionsCount# 1 2025-09-25T16:18:06.485819Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[10:968:2769], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:06.485838Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[10:968:2769], 0} finished in read 2025-09-25T16:18:06.485850Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-09-25T16:18:06.485854Z 
node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-09-25T16:18:06.485859Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-09-25T16:18:06.485864Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-09-25T16:18:06.485879Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-09-25T16:18:06.485884Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-09-25T16:18:06.485889Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037888 has finished 2025-09-25T16:18:06.485895Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-09-25T16:18:06.485920Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-09-25T16:18:06.486212Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [10:968:2769], Recipient [10:706:2581]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-09-25T16:18:06.486225Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-09-25T16:18:06.486283Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [10:968:2769], Recipient [10:709:2583]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-09-25T16:18:06.486303Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-09-25T16:18:06.486322Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-09-25T16:18:06.486341Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:06.486345Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-09-25T16:18:06.486350Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:06.486355Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-09-25T16:18:06.486363Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-09-25T16:18:06.486370Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:06.486374Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:06.486379Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add 
[0:5] at 72075186224037889 to execution unit ExecuteRead 2025-09-25T16:18:06.486383Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-09-25T16:18:06.486403Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-09-25T16:18:06.486432Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-09-25T16:18:06.486438Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[10:968:2769], 1} after executionsCount# 1 2025-09-25T16:18:06.486445Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[10:968:2769], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:06.486456Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[10:968:2769], 1} finished in read 2025-09-25T16:18:06.486464Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:06.486468Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-09-25T16:18:06.486473Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-09-25T16:18:06.486477Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-09-25T16:18:06.486485Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-09-25T16:18:06.486489Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-09-25T16:18:06.486494Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037889 has finished 2025-09-25T16:18:06.486499Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-09-25T16:18:06.486512Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-09-25T16:18:06.486667Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [10:968:2769], Recipient [10:709:2583]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-09-25T16:18:06.486676Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } >> TCacheTest::WatchRoot >> KqpScan::ScanRetryReadRanges [GOOD] >> THiveTest::TestServerlessMigration >> TCacheTest::Recreate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet_1 [GOOD] Test command err: Trying to 
start YDB, gRPC: 22732, MsgBus: 7829 2025-09-25T16:18:00.181096Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061841204461370:2138];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:00.181334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:00.188984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003fac/r3tmp/tmpeqwtPM/pdisk_1.dat 2025-09-25T16:18:00.274252Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:00.276187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061841204461270:2081] 1758817080179203 != 1758817080179206 2025-09-25T16:18:00.312656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.312793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22732, node 1 2025-09-25T16:18:00.313854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:00.364904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:00.433141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:00.433154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:00.433156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:00.433196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7829 TClient is connected to server localhost:7829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:00.668640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:00.677468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:00.697758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.746997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.776606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.797265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:00.912294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061841204462915:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.912322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.912439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061841204462925:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.912444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:00.989908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.002227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.016662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.030386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.048096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.064216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.081270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.100780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.125264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061845499431085:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.125311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.125681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061845499431090:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.125696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061845499431091:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:01.125775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, statu ... 556529Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:06.557775Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24489, node 3 2025-09-25T16:18:06.563596Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:06.563622Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:06.563624Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:06.563685Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27641 TClient is connected to server localhost:27641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:06.613634Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:06.614810Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:06.635628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:06.647543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:06.668947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:06.686270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:06.830716Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:06.947197Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061866702154443:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.947226Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.947506Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061866702154462:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.947523Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.952382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:06.961585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:06.974768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:06.986745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.000955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.015233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.029418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.043437Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.060540Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061870997122613:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.060573Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061870997122618:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.060582Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.060640Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061870997122621:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.060649Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.061389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:07.070758Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061870997122620:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:07.152392Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061870997122674:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TCacheTest::MigrationLostMessage >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> THiveTest::TestCreateTablet >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> THiveImplTest::BootQueueSpeed >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::List >> THiveTest::TestLocalDisconnect >> TCacheTest::Navigate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-09-25T16:18:01.210091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:01.210230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:01.245517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:01.248053Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:01.248475Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:678:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:01.248558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:01.248591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:01.248980Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:674:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:01.249036Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:01.249063Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003c39/r3tmp/tmpXitABh/pdisk_1.dat 2025-09-25T16:18:01.494790Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:01.541621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.541665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.541806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:01.541822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:01.594580Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:18:01.594754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:01.594887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:01.689406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:01.728183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.739086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:02.005718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:02.514074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1415:2833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.514110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1426:2838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.514123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.514296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1429:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.514307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:02.515457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:02.616634Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:02.616686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:02.982720Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1430:2842], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:03.085070Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1552:2910] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:03.243663Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710660. Ctx: { TraceId: 01k60tnq4h2rymz4exp74rjqgm, Database: , SessionId: ydb://session/3?node_id=1&id=MWRiM2E1MjQtMjUzMjU1MmYtNDdjNzhlZjUtZjI2ZTZiYjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root -- nodeId: 2 2025-09-25T16:18:03.594296Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tnqvwd8nxxv2zcgxrhjs6, Database: , SessionId: ydb://session/3?node_id=1&id=ZjdmYjY2ZTctOTVlZGIxYzctN2JjZjU5ZDItYmVkMDZkMTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root -- EvScan [1:1631:2962] -> [2:1587:2428] -- EvScanData from [2:1635:2435]: pass 2025-09-25T16:18:03.769518Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710663. Ctx: { TraceId: 01k60tnqvwd8nxxv2zcgxrhjs6, Database: , SessionId: ydb://session/3?node_id=1&id=ZjdmYjY2ZTctOTVlZGIxYzctN2JjZjU5ZDItYmVkMDZkMTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1,"VirtualTimestamp":{"Step":2000,"TxId":281474976710661},"Finished":true} 2025-09-25T16:18:03.770439Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976710661] shutting down 2025-09-25T16:18:05.689005Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:05.690007Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:05.692984Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:05.693593Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:05.693757Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:476:2400], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:05.693833Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:05.693873Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:05.694216Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:05.694263Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:05.694287Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003c39/r3tmp/tmp2XQnF9/pdisk_1.dat 2025-09-25T16:18:05.797338Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:05.833581Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:05.833624Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:05.833753Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:05.833764Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:05.867107Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-09-25T16:18:05.867367Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:05.867467Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:05.919411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:05.955351Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:05.977281Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:06.227106Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:06.694154Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1414:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.694188Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1425:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.694286Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.694447Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1430:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.694489Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:06.695373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:06.798401Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:06.798444Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:07.079667Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1428:2839], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:07.157995Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:1555:2912] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:07.263954Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710660. Ctx: { TraceId: 01k60tnv752shhr4atgyp44x7b, Database: , SessionId: ydb://session/3?node_id=3&id=ODYzNzRiZWItZjI1OGIxNGQtNDFjOTUxYmUtM2YxYmFjNDM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root -- nodeId: 4 2025-09-25T16:18:07.533634Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tnvskd2htm4757dn6egxw, Database: , SessionId: ydb://session/3?node_id=3&id=ZGFhNzg2ZTQtYjFmNDk5MWMtNjYyZmM1N2EtZjhkYzJmNDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root -- EvScan [3:1635:2965] -> [4:1590:2428] -- EvScanData from [4:1639:2435]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":2000,"TxId":281474976710661},"Finished":false} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"format":1},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":2000,"TxId":281474976710661},"Finished":true} 2025-09-25T16:18:07.538072Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976710661] shutting down >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::Navigate [GOOD] >> TCacheTest::MigrationUndo >> TCacheTest::PathBelongsToDomain >> KqpScan::TaggedScalar [GOOD] >> KqpScan::StreamLookupFailedRead >> TCacheTest::SysLocks [GOOD] >> 
test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] >> test.py::test[join-lookupjoin_inner_1o--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-09-25T16:17:13.519502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061639023607652:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:13.519543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005169/r3tmp/tmpLNE6he/pdisk_1.dat 2025-09-25T16:17:13.586536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:13.603147Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29168, node 1 2025-09-25T16:17:13.614602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:13.614614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:13.614616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:13.614672Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:13.620932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:13.620953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:13.622428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:17:13.638997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... Triggering split by load TClient is connected to server localhost:29876 2025-09-25T16:17:13.778874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:13.986229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061639023608528:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:13.986263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:13.986391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061639023608538:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:13.986425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.025313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:14.064891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643318575991:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.064914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.066487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643318575993:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.066507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.074507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817034093 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817034093 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:17:14.101877Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643318576088:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.101913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.101983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643318576093:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.101992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643318576094:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.101997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.102592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-09-25T16:17:14.102661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:17:14.102664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-09-25T16:17:14.102675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:17:14.102677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-09-25T16:17: ... d: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:18:04.269680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:448: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037889 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-09-25T16:18:04.269771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-09-25T16:18:04.270022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\234\027\322\201\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\234\027\322\201\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 
2025-09-25T16:18:04.270039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:04.272605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-09-25T16:18:04.285664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-09-25T16:18:04.285730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715658:0 2 -> 3 2025-09-25T16:18:04.286972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:04.288747Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7554061858067599164:8380] 2025-09-25T16:18:04.291932Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037891 2025-09-25T16:18:04.291980Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-09-25T16:18:04.292036Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-09-25T16:18:04.293223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-09-25T16:18:04.293241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715658:0 3 -> 131 2025-09-25T16:18:04.293765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-09-25T16:18:04.298290Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state Ready tabletId 72075186224037891 2025-09-25T16:18:04.298329Z node 1 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:18:04.298346Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037891 2025-09-25T16:18:04.298359Z node 1 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037891 2025-09-25T16:18:04.298456Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-09-25T16:18:04.299663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-09-25T16:18:04.299720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: 
OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-09-25T16:18:04.299813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715658:0 131 -> 132 2025-09-25T16:18:04.300450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-09-25T16:18:04.300516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-09-25T16:18:04.300530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-09-25T16:18:04.300845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-09-25T16:18:04.300860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-09-25T16:18:04.300865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-09-25T16:18:04.308009Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-09-25T16:18:04.308135Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-09-25T16:18:04.308143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-09-25T16:18:04.308237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-09-25T16:18:04.308254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976715658:0 progress is 1/1 2025-09-25T16:18:04.308258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976715658:0 progress is 1/1 2025-09-25T16:18:04.308267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715658:0 2025-09-25T16:18:04.312433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715658:0 2025-09-25T16:18:04.312436Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:04.312514Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:04.312759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 
2025-09-25T16:18:04.312938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:18:04.314920Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-09-25T16:18:04.314947Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-09-25T16:18:04.315269Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-09-25T16:18:04.315273Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-09-25T16:18:04.315306Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-09-25T16:18:04.315310Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037890 2025-09-25T16:18:04.316037Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-09-25T16:18:04.316050Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037889 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817034093 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> KqpSplit::StreamLookupSplitAfterFirstResult [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead >> TCacheTest::PathBelongsToDomain [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref--Results] >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> TCacheTest::MigrationCommit [GOOD] >> TCacheTest::CookiesArePreserved >> THiveTest::TestServerlessMigration [GOOD] >> THiveTest::TestUpdateChannelValues >> TCacheTest::MigrationUndo [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> KqpScan::SelfJoin3xSameLabels [GOOD] >> KqpScan::SelfJoin3x >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-09-25T16:18:08.389249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.389276Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.406607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:08.408773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.409049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.413098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 
TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-09-25T16:18:08.414336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-09-25T16:18:08.624134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.624157Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.641458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TCacheTestWithRealSystemViewPaths::SystemViews >> TCacheTest::CookiesArePreserved [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace |81.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[blocks-decimal_comparison--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-09-25T16:18:08.588669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.588691Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.605157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:08.634911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, 
txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-09-25T16:18:08.796725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.796752Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.811989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:08.813399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.813536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-09-25T16:18:08.814546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:18:08.814561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.814672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 104:0, at schemeshard: 72057594046678944, first GetDB 
called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 FAKE_COORDINATOR: Erasing txId 104 TestModificationResult got TxId: 104, wait until txId: 104 TestModificationResults wait txId: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000006 FAKE_COORDINATOR: Erasing txId 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 104 TestWaitNotification wait txId: 105 2025-09-25T16:18:08.815773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-09-25T16:18:08.815791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 2025-09-25T16:18:08.816366Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:279:2252], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:08.816392Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:281:2254], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:08.816412Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:283:2256], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:08.816538Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:296:2263], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:18:08.816703Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:305:2266], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:18:08.816857Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:313:2274], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:08.816898Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:315:2276], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:08.816933Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:317:2278], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:08.817055Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:323:2284], domain# [OwnerId: 
72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:18:08.817096Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:325:2286], domain# [OwnerId: 72057594046678944, LocalPathId: 3], path's domain# [OwnerId: 72057594046678944, LocalPathId: 5] >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-09-25T16:18:08.379303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.379326Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.395605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:72:2112] sender: [1:178:2067] recipient: [1:49:2096] Leader for TabletID 72057594046678944 is [1:72:2112] sender: [1:181:2067] recipient: [1:180:2175] Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:183:2067] recipient: [1:180:2175] 2025-09-25T16:18:08.402846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.402868Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:215:2067] recipient: [1:24:2071] 2025-09-25T16:18:08.444947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.446132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: Coordinator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 
BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: Mediator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:242:2218] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:242:2218] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:244:2220] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:244:2220] Leader for TabletID 72075186233409546 is [1:254:2224] sender: [1:256:2067] recipient: [1:242:2218] Leader for TabletID 72075186233409547 is [1:257:2226] sender: [1:258:2067] recipient: [1:244:2220] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.450688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:254:2224] sender: [1:290:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:257:2226] sender: [1:291:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.466930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:338:2290] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:338:2290] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:345:2294] sender: [1:346:2067] recipient: [1:338:2290] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:18:08.565723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, 
opId: 105:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:1234) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: SchemeShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2338] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2338] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:423:2342] sender: [1:425:2067] recipient: [1:416:2338] 2025-09-25T16:18:08.571537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.571559Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-09-25T16:18:08.575184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:08.575201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:18:08.575279Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.575308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-09-25T16:18:08.587061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.587166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-09-25T16:18:08.622072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 
72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-09-25T16:18:08.679164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:624:2067] recipient: [1:620:2508] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:624:2067] recipient: [1:620:2508] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:627:2512] sender: [1:628:2067] recipient: [1:620:2508] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-09-25T16:18:08.803261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.803285Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.818159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:178:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:181:2067] recipient: [2:180:2175] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:183:2067] recipient: [2:180:2175] 2025-09-25T16:18:08.824626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.824643Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:215:2067] recipient: [2:24:2071] 2025-09-25T16:18:08.866624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first 
called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.867714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: Coordinator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: Mediator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:256:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:259:2067] recipient: [2:244:2220] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.870595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:290:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:291:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.883261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 FAKEHIVE 72057594037968897 
TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:345:2294] sender: [2:346:2067] recipient: [2:338:2290] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:18:08.949840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:1234) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: SchemeShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2342] sender: [2:425:2067] recipient: [2:416:2338] 2025-09-25T16:18:08.955633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.955658Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-09-25T16:18:08.959154Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.959187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:601) 2025-09-25T16:18:08.959725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.959981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 
Forgetting tablet 72075186233409549 2025-09-25T16:18:08.961823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.961989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-09-25T16:18:08.962323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-09-25T16:18:08.964044Z node 2 :TX_DATASHARD ERROR: datashard.cpp:3586: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186233409548 Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:510:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:513:2067] recipient: [2:512:2406] Leader for TabletID 72057594046678944 is [2:514:2407] sender: [2:515:2067] recipient: [2:512:2406] 2025-09-25T16:18:08.971674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.971699Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded
>> TCacheTest::RacyRecreateAndSync
>> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD]
>> TFlatTableLongTxLarge::LargeDeltaChain [GOOD]
>> TCacheTestWithRealSystemViewPaths::SystemViews [GOOD]
>> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD]
Test command err: 2025-09-25T16:18:08.503734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.503760Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.522191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR:
advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-09-25T16:18:08.526762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:18:08.526815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:18:08.526838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-09-25T16:18:08.751209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.751234Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.767000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:178:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:181:2067] recipient: [2:180:2175] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:183:2067] recipient: [2:180:2175] 2025-09-25T16:18:08.775195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.775220Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:215:2067] recipient: [2:24:2071] 2025-09-25T16:18:08.818584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.819991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: Coordinator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: Mediator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:256:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:259:2067] recipient: [2:244:2220] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.826030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:290:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:291:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.844082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] Leader for 
TabletID 72075186233409548 is [2:345:2294] sender: [2:346:2067] recipient: [2:338:2290] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:18:08.930422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:1234) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: SchemeShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2342] sender: [2:425:2067] recipient: [2:416:2338] 2025-09-25T16:18:08.939074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.939108Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-09-25T16:18:08.943083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:08.943105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:18:08.943204Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.943229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-09-25T16:18:08.954996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.955079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:511:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:513:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:515:2067] recipient: [2:514:2413] Leader for TabletID 72057594046678944 is 
[2:516:2414] sender: [2:517:2067] recipient: [2:514:2413] 2025-09-25T16:18:08.965843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.965869Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:516:2414] sender: [2:545:2067] recipient: [2:24:2071] 2025-09-25T16:18:09.203638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:09.203668Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:09.219836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-09-25T16:18:09.221075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:09.222137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] Test command err: 2025-09-25T16:17:56.772950Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:17:56.774641Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:56.776787Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:17:56.776862Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:17:56.776919Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:17:56.777058Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:17:56.778303Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:56.778451Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:17:56.779111Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:56.779254Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:17:56.779300Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:17:56.782226Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:17:56.782262Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:17:56.782307Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:17:56.782338Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:17:56.806087Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:17:56.856521Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:17:56.856633Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:17:56.858393Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:17:56.858549Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:17:56.858558Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:17:56.858568Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:17:56.858572Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:17:56.858583Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:17:56.858618Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:17:56.864031Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: 
"/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 65 Path: "/16/pdisk-65.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 66 Path: "/16/pdisk-66.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 67 Path: "/16/pdisk-67.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 68 Path: "/17/pdisk-68.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 69 Path: "/17/pdisk-69.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 70 Path: "/17/pdisk-70.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 71 Path: "/17/pdisk-71.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 72 Path: "/18/pdisk-72.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 73 Path: "/18/pdisk-73.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 74 Path: "/18/pdisk-74.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 75 Path: "/18/pdisk-75.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 76 Path: "/19/pdisk-76.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 77 Path: "/19/pdisk-77.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 78 Path: "/19/pdisk-78.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 79 Path: "/19/pdisk-79.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 80 Path: "/20/pdisk-80.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 81 Path: "/20/pdisk-81.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 82 Path: "/20/pdisk-82.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 83 Path: "/20/pdisk-83.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 84 Path: "/21/pdisk-84.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 85 Path: "/21/pdisk-85.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 86 Path: "/21/pdisk-86.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 87 Path: "/21/pdisk-87.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 88 Path: "/22/pdisk-88.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 89 Path: "/22/pdisk-89.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 90 Path: "/22/pdisk-90.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 91 Path: "/22/pdisk-91.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 92 Path: "/23/pdisk-92.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 93 Path: "/23/pdisk-93.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 94 Path: "/23/pdisk-94.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 95 Path: "/23/pdisk-95.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 96 Path: "/24/pdisk-96.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 
PDiskId: 97 Path: "/24/pdisk-97.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 98 Path: "/24/pdisk-98.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 99 Path: "/24/pdisk-99.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 100 ... OK } } 2025-09-25T16:18:07.532683Z node 41 :CMS INFO: walle_check_task_adapter.cpp:29: Processing Wall-E request: TaskId: "task-1" 2025-09-25T16:18:07.606361Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:18:07.606404Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:18:07.606420Z node 41 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:18:07.606600Z node 41 :CMS INFO: cms.cpp:364: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has planned shutdown (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-09-25T16:18:07.606612Z node 41 :CMS DEBUG: cms.cpp:396: Checking action: Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has planned shutdown (permission user-p-2 owned by user). Down: " } 2025-09-25T16:18:07.606621Z node 41 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 43, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-09-25T16:18:07.606647Z node 41 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: ) 2025-09-25T16:18:07.606682Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:18:07.606722Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 50, body# User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). 
Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-09-25T16:18:07.617664Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:18:07.617743Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } RequestId: "Wall-E-r-2" Deadline: 420824120 } 2025-09-25T16:18:07.617801Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } Task { TaskId: "task-1" Hosts: "43" } } 2025-09-25T16:18:07.617931Z node 41 :CMS INFO: walle_check_task_adapter.cpp:29: Processing Wall-E request: TaskId: "task-2" 2025-09-25T16:18:07.639977Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:18:07.640021Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:18:07.640038Z node 41 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:18:07.640224Z node 41 :CMS INFO: cms.cpp:364: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "42" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'42\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-09-25T16:18:07.640236Z node 41 :CMS DEBUG: cms.cpp:396: Checking action: Type: REBOOT_HOST Host: "42" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'42\': node state: \'Locked\'" } 2025-09-25T16:18:07.640248Z node 41 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-09-25T16:18:07.640292Z node 41 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:18:07.640317Z node 41 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-3, owner# Wall-E 2025-09-25T16:18:07.640325Z node 41 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (42) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2025-09-25T16:18:07.640338Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:18:07.640379Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "42" Duration: 18446744073709551615 2025-09-25T16:18:07.640388Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-3, owner# Wall-E 2025-09-25T16:18:07.651673Z node 41 :CMS 
DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:18:07.651758Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-3" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "42" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 42 InterconnectPort: 12002 } } } } 2025-09-25T16:18:07.651808Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-2" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-2" Hosts: "42" } } 2025-09-25T16:18:07.651916Z node 41 :CMS INFO: walle_remove_task_adapter.cpp:29: Processing Wall-E request: TaskId: "task-2" 2025-09-25T16:18:07.651939Z node 41 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-09-25T16:18:07.651965Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# Wall-E-p-3, reason# explicit remove 2025-09-25T16:18:07.669353Z node 41 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2025-09-25T16:18:07.669400Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvRemoveWalleTask { TaskId: task-2 }, response# NKikimr::NCms::TEvCms::TEvWalleTaskRemoved { TaskId: task-2 } 2025-09-25T16:18:07.669417Z node 41 :CMS DEBUG: cms.cpp:1220: Found empty task task-2 2025-09-25T16:18:07.669494Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskRequest { TaskId: "task-2" }, response# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskResponse { Status { Code: OK } } 2025-09-25T16:18:07.669512Z node 41 :CMS DEBUG: cms_tx_remove_task.cpp:22: TTxRemoveTask Execute 2025-09-25T16:18:07.669540Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove task: id# task-2 2025-09-25T16:18:07.669725Z node 41 :CMS INFO: walle_check_task_adapter.cpp:29: Processing Wall-E request: TaskId: "task-1" 2025-09-25T16:18:07.681108Z node 41 :CMS DEBUG: cms_tx_remove_task.cpp:42: TTxRemoveTask Complete 2025-09-25T16:18:07.692439Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:18:07.692474Z node 41 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:18:07.692487Z node 41 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:01Z 2025-09-25T16:18:07.692597Z node 41 :CMS INFO: cms.cpp:364: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). 
Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-09-25T16:18:07.692607Z node 41 :CMS DEBUG: cms.cpp:396: Checking action: Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/43/pdisk-43.data) is locked by this request, Host ::1:12002 (42) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } 2025-09-25T16:18:07.692616Z node 41 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 43, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-09-25T16:18:07.692646Z node 41 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:18:07.692668Z node 41 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# Wall-E-p-4, requestId# Wall-E-r-2, owner# Wall-E 2025-09-25T16:18:07.692677Z node 41 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12003 (43) (permission Wall-E-p-4 until 586524-01-19T08:01:49Z) 2025-09-25T16:18:07.692687Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:18:07.692719Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-4, validity# 586524-01-19T08:01:49.551615Z, action# Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 2025-09-25T16:18:07.692728Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2025-09-25T16:18:07.703737Z node 41 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:18:07.703827Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-4" Action { Type: SHUTDOWN_HOST Host: "43" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 43 InterconnectPort: 12003 } } } } 2025-09-25T16:18:07.703877Z node 41 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "43" } } >> TCacheTest::SystemViews >> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess [GOOD] >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleOut >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers 
[GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> TCacheTest::RacyCreateAndSync [GOOD] >> KqpScan::ScanPg [GOOD] >> KqpScan::TwoAggregatesOneFullFrameWindow [GOOD] >> KqpScan::TopSortOverSecondaryIndexRead >> TCacheTest::SystemViews [GOOD] >> TCacheTest::TableSchemaVersion >> THiveTest::TestFollowers >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess [GOOD] Test command err: 2025-09-25T16:18:09.526980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:09.527011Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 
at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: 
minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at 
step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_CO ... 
FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000021 FAKE_COORDINATOR: Erasing txId 
281474976710689 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-09-25T16:18:09.996110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000036 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-09-25T16:18:09.996810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:09.997238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-09-25T16:18:09.997766Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [2:425:2405], for# user1@builtin, access# DescribeSchema 2025-09-25T16:18:09.997847Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [2:431:2411], for# user1@builtin, access# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: 2025-09-25T16:17:50.754573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:50.804213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:50.807074Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:50.807159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:50.807186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00442b/r3tmp/tmpeL2vK8/pdisk_1.dat 2025-09-25T16:17:50.935545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:50.935594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:50.961454Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:50.962415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817070309560 != 1758817070309564 2025-09-25T16:17:50.995742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:51.045214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:51.093499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:51.171550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:51.189916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828672, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:17:51.190227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3112: StateInit, received event# 268828673, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:17:51.190309Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:17:51.190378Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:17:51.200662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3125: StateInactive, received event# 268828684, Sender [1:664:2558], Recipient [1:673:2564]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:17:51.201391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:17:51.201436Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-09-25T16:17:51.201633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:17:51.201644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:17:51.201653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:17:51.201724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:17:51.201755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:17:51.201774Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:17:51.213307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:17:51.226981Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:17:51.227117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:17:51.227157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:17:51.227165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:17:51.227172Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:17:51.227180Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:17:51.227276Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 2146435072, Sender [1:673:2564], Recipient [1:673:2564]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.227287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-09-25T16:17:51.227425Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:17:51.227460Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:17:51.227480Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:17:51.227491Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:17:51.227501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-09-25T16:17:51.227508Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-09-25T16:17:51.227514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-09-25T16:17:51.227521Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:17:51.227527Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-09-25T16:17:51.227541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269877761, Sender [1:674:2565], Recipient [1:673:2564]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.227548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:17:51.227557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:674:2565], sessionId# [0:0:0] 2025-09-25T16:17:51.227579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269549568, Sender [1:409:2405], Recipient [1:674:2565] 2025-09-25T16:17:51.227585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3150: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-09-25T16:17:51.227612Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:17:51.227683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-09-25T16:17:51.227697Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:17:51.227732Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:17:51.227746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-09-25T16:17:51.227751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-09-25T16:17:51.227759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-09-25T16:17:51.227764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-09-25T16:17:51.227873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-09-25T16:17:51.227879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-09-25T16:17:51.227884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-09-25T16:17:51.227888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-09-25T16:17:51.227903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-09-25T16:17:51.227908Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-09-25T16:17:51.227912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-09-25T16:17:51.227916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-09-25T16:17:51.227923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-09-25T16:17:51.228375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269746185, Sender [1:691:2575], Recipient [1:673:2564]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-09-25T16:17:51.228390Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:17:51.241136Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:17:51.241177Z node 1 :TX_DATASHAR ... nt TEvTxProcessing::TEvReadSetAck 2025-09-25T16:18:09.526603Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-09-25T16:18:09.526625Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:09.526646Z node 11 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-09-25T16:18:09.526656Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269287938, Sender [11:710:2585], Recipient [11:714:2588]: {TEvReadSet step# 2000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2025-09-25T16:18:09.526674Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3164: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-09-25T16:18:09.526678Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2025-09-25T16:18:09.557895Z node 11 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tnxzq2ah3mt87s3h6x7xc, Database: , SessionId: ydb://session/3?node_id=11&id=ODc0ZTM0MzktZmI2YjEwNC1hNjdmNGNhYS1kNzc4ZTUxZA==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:09.558881Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [11:973:2773], Recipient [11:710:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-09-25T16:18:09.558943Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-09-25T16:18:09.558966Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-09-25T16:18:09.558991Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-09-25T16:18:09.558998Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-09-25T16:18:09.559005Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:09.559010Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-09-25T16:18:09.559027Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-09-25T16:18:09.559034Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-09-25T16:18:09.559039Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:09.559044Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-09-25T16:18:09.559048Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-09-25T16:18:09.559070Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-09-25T16:18:09.559133Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-09-25T16:18:09.559143Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[11:973:2773], 0} after executionsCount# 1 2025-09-25T16:18:09.559152Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[11:973:2773], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:09.559168Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[11:973:2773], 0} finished in read 2025-09-25T16:18:09.559180Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-09-25T16:18:09.559184Z 
node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-09-25T16:18:09.559189Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-09-25T16:18:09.559197Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-09-25T16:18:09.559211Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-09-25T16:18:09.559215Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-09-25T16:18:09.559220Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037888 has finished 2025-09-25T16:18:09.559227Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-09-25T16:18:09.559249Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-09-25T16:18:09.559532Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [11:973:2773], Recipient [11:710:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-09-25T16:18:09.559544Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-09-25T16:18:09.559569Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553215, Sender [11:973:2773], Recipient [11:714:2588]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-09-25T16:18:09.559600Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-09-25T16:18:09.559611Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-09-25T16:18:09.559621Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-09-25T16:18:09.559626Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-09-25T16:18:09.559631Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-09-25T16:18:09.559635Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-09-25T16:18:09.559643Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-09-25T16:18:09.559649Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-09-25T16:18:09.559653Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-09-25T16:18:09.559657Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] 
at 72075186224037889 to execution unit ExecuteRead 2025-09-25T16:18:09.559662Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-09-25T16:18:09.559677Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-09-25T16:18:09.559704Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2489: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-09-25T16:18:09.559713Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[11:973:2773], 1} after executionsCount# 1 2025-09-25T16:18:09.559720Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[11:973:2773], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-09-25T16:18:09.559731Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[11:973:2773], 1} finished in read 2025-09-25T16:18:09.559738Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-09-25T16:18:09.559742Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-09-25T16:18:09.559747Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-09-25T16:18:09.559751Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-09-25T16:18:09.559758Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-09-25T16:18:09.559762Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-09-25T16:18:09.559766Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037889 has finished 2025-09-25T16:18:09.559771Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-09-25T16:18:09.559783Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-09-25T16:18:09.559898Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3144: StateWork, received event# 269553219, Sender [11:973:2773], Recipient [11:714:2588]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-09-25T16:18:09.559906Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::Attributes >> KqpScan::SelfJoin3x [GOOD] >> KqpScan::SimpleWindow >> KqpScan::DecimalColumn ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] Test command err: Trying to start YDB, gRPC: 13371, MsgBus: 19671 2025-09-25T16:18:07.227271Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061870454743903:2139];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:07.227410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f87/r3tmp/tmpwI4ZfO/pdisk_1.dat 2025-09-25T16:18:07.269315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:07.281555Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:07.284705Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061870454743802:2081] 1758817087226101 != 1758817087226104 TServer::EnableGrpc on GrpcPort 13371, node 1 2025-09-25T16:18:07.296997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:07.297013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:07.297015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:07.297062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19671 2025-09-25T16:18:07.331038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:07.331073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:07.332178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:07.354603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:07.368251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.387553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.410750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.423059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.557082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:07.635939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061870454745454:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.635968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.636046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061870454745464:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.636056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.687299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.695315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.707496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.723157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.736371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.750849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.763942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.777970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.798766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061870454746327:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.798799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.798880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061870454746332:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.798894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061870454746333:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:07.798902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.799834Z node 1 :FLAT_TX_SCHEMESHARD ... lVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:09.062912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:09.102686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:09.112541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:09.134147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:09.145920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:09.172363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:09.345707Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061878857891736:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.345730Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.345790Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061878857891746:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.345804Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.358801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.367657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.382652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.395533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.409370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.422883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.437739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.451334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.467086Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061878857892611:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.467111Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.467133Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061878857892616:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.467137Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061878857892618:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.467144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:09.467880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:09.470276Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061878857892620:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:09.552321Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061878857892672:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:09.778863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:09.855052Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tny7wfqzp1mcwfkhc5b36, Database: , SessionId: ydb://session/3?node_id=2&id=ZWQwMjFiNzUtYjY5M2NiNzItOGU2ZDI0MWEtNTgwZmExODU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:09.857750Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710675. Ctx: { TraceId: 01k60tny7wfqzp1mcwfkhc5b36, Database: , SessionId: ydb://session/3?node_id=2&id=ZWQwMjFiNzUtYjY5M2NiNzItOGU2ZDI0MWEtNTgwZmExODU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:09.931901Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710677. Ctx: { TraceId: 01k60tnya6bja0k5efnh5ts2rb, Database: , SessionId: ydb://session/3?node_id=2&id=OGYwOTU2NTYtNmI5YjZmNmEtMWI3Y2JkNmMtMjdlM2M5Nw==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root captured evread ----------------------------------------------------------- 2025-09-25T16:18:09.934138Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817089974, txId: 281474976710676] shutting down >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestNotEnoughResources >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-09-25T16:18:09.863479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:09.863503Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:09.879898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:09.882392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:09.906158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:18:09.911698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 
2025-09-25T16:18:09.923940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-09-25T16:18:10.115499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:10.115527Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:10.131290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:10.133668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-09-25T16:18:00.210415Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:00.210528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:00.273833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:00.276123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:00.276529Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:678:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:00.276608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:00.276638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:00.277083Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:674:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:00.277138Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:00.277164Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003c82/r3tmp/tmpae39sZ/pdisk_1.dat 2025-09-25T16:18:00.715148Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:00.754839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.754878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:00.755039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.755054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:00.804399Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:18:00.804559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:00.804701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:00.909419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:00.997254Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.009443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:01.248181Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:221:2181] Handle TEvProposeTransaction 2025-09-25T16:18:01.248203Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:221:2181] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:18:01.248244Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:221:2181] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1250:2742] 2025-09-25T16:18:01.284582Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:1250:2742] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: 
"" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-09-25T16:18:01.284627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:1250:2742] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:18:01.285079Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [1:1250:2742] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-09-25T16:18:01.285107Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:1250:2742] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:18:01.285228Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:1250:2742] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:18:01.285369Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:1250:2742] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:18:01.285388Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:1250:2742] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:18:01.285478Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:1250:2742] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:18:01.286104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:01.287806Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:1250:2742] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-09-25T16:18:01.287829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:1250:2742] txid# 281474976710657 SEND to# [1:1134:2696] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-09-25T16:18:01.367033Z node 2 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1301:2386] 2025-09-25T16:18:01.367155Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:01.401653Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:01.411472Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:01.411735Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:18:01.411749Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:18:01.411760Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:18:01.411845Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:01.412027Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:01.412051Z 
node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1325:2386] in generation 1 2025-09-25T16:18:01.438156Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:01.471597Z node 2 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:18:01.471704Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:01.471736Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [2:1328:2403] 2025-09-25T16:18:01.471743Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:01.471749Z node 2 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:18:01.471755Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:01.471993Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:18:01.472020Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:18:01.472039Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:01.472048Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:01.472058Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:18:01.472065Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:01.472084Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:1285:2772], serverId# [2:1298:2384], sessionId# [0:0:0] 2025-09-25T16:18:01.472247Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:18:01.472312Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:18:01.472339Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-09-25T16:18:01.473220Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:01.485864Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:18:01.485915Z node 2 :TX_ ... 
m compute actor: [3:1657:2972], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 413 Tasks { TaskId: 1 CpuTimeUs: 147 FinishTimeMs: 1758817086654 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 6 BuildCpuTimeUs: 141 HostName: "ghrun-v6cxduzo2m" NodeId: 3 CreateTimeMs: 1758817086654 UpdateTimeMs: 1758817086654 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:06.655003Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710664. Ctx: { TraceId: 01k60tntrr6de1yr0agnw91qmc, Database: , SessionId: ydb://session/3?node_id=3&id=ODU4ZTg1NDgtNDBlYjhlNTYtYjBjOTk5ZDktNmY0YTU1Mzg=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [3:1657:2972] 2025-09-25T16:18:06.655025Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1208: ActorId: [3:1654:2933] TxId: 281474976710664. Ctx: { TraceId: 01k60tntrr6de1yr0agnw91qmc, Database: , SessionId: ydb://session/3?node_id=3&id=ODU4ZTg1NDgtNDBlYjhlNTYtYjBjOTk5ZDktNmY0YTU1Mzg=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-09-25T16:18:06.655031Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1222: ActorId: [3:1654:2933] TxId: 281474976710664. Ctx: { TraceId: 01k60tntrr6de1yr0agnw91qmc, Database: , SessionId: ydb://session/3?node_id=3&id=ODU4ZTg1NDgtNDBlYjhlNTYtYjBjOTk5ZDktNmY0YTU1Mzg=, PoolId: default, DatabaseId: /Root}. Terminate, become ZombieState 2025-09-25T16:18:06.655037Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:900: ActorId: [3:1654:2933] TxId: 281474976710664. Ctx: { TraceId: 01k60tntrr6de1yr0agnw91qmc, Database: , SessionId: ydb://session/3?node_id=3&id=ODU4ZTg1NDgtNDBlYjhlNTYtYjBjOTk5ZDktNmY0YTU1Mzg=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000413s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-09-25T16:18:06.655279Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976710661] shutting down 2025-09-25T16:18:06.655302Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:222:2181] Handle TEvProposeTransaction 2025-09-25T16:18:06.655309Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:222:2181] TxId# 0 ProcessProposeTransaction 2025-09-25T16:18:06.655329Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:222:2181] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1659:2973] SnapshotReq marker# P0 2025-09-25T16:18:06.655480Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1661:2973] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-09-25T16:18:06.655718Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1661:2973] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-09-25T16:18:06.655752Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1659:2973] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-09-25T16:18:07.963677Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:07.963807Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:07.966351Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] 
[Service] Failed to discover tenant nodes
2025-09-25T16:18:07.966545Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-09-25T16:18:07.966830Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:679:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:07.966882Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:07.966901Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:07.967123Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:675:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:07.967148Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:07.967160Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003c82/r3tmp/tmpohWNZG/pdisk_1.dat 2025-09-25T16:18:08.069636Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:08.105676Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:08.105716Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:08.105813Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:08.105825Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:08.137542Z node 5 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-09-25T16:18:08.137812Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:08.137916Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:08.189719Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:08.214569Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:08.267914Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:08.490688Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.924412Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1409:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.924446Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1420:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.924465Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.924691Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1425:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.924756Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.925804Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:09.024726Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:09.024772Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:09.301018Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1423:2834], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:09.366727Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:1547:2904] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:09.449392Z node 5 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710660. Ctx: { TraceId: 01k60tnxcw4ph9pwa0xdbt3982, Database: , SessionId: ydb://session/3?node_id=5&id=YTkzMjhmMjAtM2M1YzhlZC1mOGU5NzVlMC03YzJmZDdjNQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:09.733155Z node 5 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tnxxs2vnhv16tvxxnme42, Database: , SessionId: ydb://session/3?node_id=5&id=ZGRiNjRhYjQtOGY3ZjJiZTQtYjY2YmNlNjAtNjE0MzI4NGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:09.857160Z node 5 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710663. Ctx: { TraceId: 01k60tnxxs2vnhv16tvxxnme42, Database: , SessionId: ydb://session/3?node_id=5&id=ZGRiNjRhYjQtOGY3ZjJiZTQtYjY2YmNlNjAtNjE0MzI4NGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:09.858068Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976710661] shutting down >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> TScaleRecommenderTest::BasicTest >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-09-25T16:18:10.206631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:10.206653Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:10.225149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-09-25T16:18:10.421335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:10.421365Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:10.439655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-09-25T16:18:10.450042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:10.508159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> KqpSplit::AfterResult+Ascending >> KqpSplit::StreamLookupDeliveryProblem >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess >> test.py::test[produce-process_with_assume--ForceBlocks] [GOOD] >> test.py::test[produce-process_with_assume--Results] >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> KqpScan::PrunePartitionsByLiteral >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> TCacheTest::CheckSystemViewAccess [GOOD] >> KqpScan::IsNullPartial >> test.py::test[join-lookupjoin_inner_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] >> TScaleRecommenderTest::BasicTest 
[GOOD] >> TStorageBalanceTest::TestScenario1 >> THiveTest::TestFollowers [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> THiveTest::TestFollowersReconfiguration >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] |81.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[window-win_func_over_group_by--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2025-09-25T16:18:10.755104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:10.755135Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:10.783050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:10.788946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-09-25T16:18:10.983053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:10.983083Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:10.998105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 
TestModificationResults wait txId: 102 2025-09-25T16:18:10.999535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2025-09-25T16:18:11.000009Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [2:200:2191], for# user1@builtin, access# DescribeSchema 2025-09-25T16:18:11.000066Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [2:204:2195], for# user1@builtin, access# DescribeSchema 2025-09-25T16:18:11.227260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:11.227292Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:11.242737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-09-25T16:18:11.243794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:11.245195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:11.245296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2025-09-25T16:18:11.246503Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [3:215:2200], for# user1@builtin, access# DescribeSchema 2025-09-25T16:18:11.246596Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [3:221:2206], 
for# user1@builtin, access# >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> KqpScan::SimpleWindow [GOOD] >> KqpScan::UnionMixed >> KqpScan::StreamLookupFailedRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_large/unittest >> TFlatTableLongTxLarge::LargeDeltaChain [GOOD] Test command err: DataBytes = 1073746235 DataPages = 150237 FlatIndexBytes = 3155010 BTreeIndexBytes = 6810652 DataBytes = 1073742150 DataPages = 151523 FlatIndexBytes = 22252537 BTreeIndexBytes = 25927778 DataBytes = 1073753117 DataPages = 148879 FlatIndexBytes = 1072403884 BTreeIndexBytes = 1077128646 DataBytes = 1073744451 DataPages = 150676 FlatIndexBytes = 6479123 BTreeIndexBytes = 7437771 DataBytes = 1073743502 DataPages = 47351 FlatIndexBytes = 1643820 BTreeIndexBytes = 2065359 DataBytes = 1073744719 DataPages = 70000 FlatIndexBytes = 3454718 BTreeIndexBytes = 3553208 00000.000 II| FAKE_ENV: Born at 2025-09-25T16:17:26.915380Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 ...compacting ...waiting until compacted ...compacting ...waiting until compacted 00042.967 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00042.971 NN| TABLET_SAUSAGECACHE: Poison cache serviced 264 reqs hit {0 0b} miss {266 13194267274b} 00042.971 II| FAKE_ENV: Shut order, stopping 4 BS groups 00042.971 II| FAKE_ENV: DS.0 gone, left {15399b, 2}, put {222290b, 1558} 00042.971 II| FAKE_ENV: DS.1 gone, left {4398138657b, 535}, put {8813752974b, 2851} 00042.972 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00042.972 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00042.972 II| FAKE_ENV: All BS storage groups are stopped 00042.972 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00042.977 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-09-25T16:17:59.736982Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:59.737114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:59.789195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:59.792719Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:59.793007Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:678:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:59.793211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:59.793284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:17:59.793480Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:674:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:59.793541Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:59.793575Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ca3/r3tmp/tmpojFz64/pdisk_1.dat 2025-09-25T16:17:59.997898Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:00.035091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.035135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:00.035349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:00.035368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:00.067992Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:18:00.068214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:00.068342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:00.147942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:00.197971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:00.209881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:00.506942Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:221:2181] Handle TEvProposeTransaction 2025-09-25T16:18:00.506973Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:221:2181] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:18:00.507004Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:221:2181] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1247:2739] 2025-09-25T16:18:00.525945Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:1247:2739] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: 
"" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-09-25T16:18:00.526003Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:1247:2739] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:18:00.526346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [1:1247:2739] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-09-25T16:18:00.526365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:1247:2739] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:18:00.526477Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:1247:2739] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:18:00.526535Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:1247:2739] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:18:00.526554Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:1247:2739] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:18:00.527262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:00.527426Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:1247:2739] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:18:00.528358Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:1247:2739] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-09-25T16:18:00.528377Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:1247:2739] txid# 281474976710657 SEND to# [1:1130:2692] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-09-25T16:18:00.589298Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1324:2796] 2025-09-25T16:18:00.589409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:00.602091Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1326:2797] 2025-09-25T16:18:00.602184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:00.606394Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1331:2800] 2025-09-25T16:18:00.606510Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:00.608360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:00.608424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:00.608638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 
2025-09-25T16:18:00.608650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:18:00.608659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:18:00.608737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:00.608988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:00.609006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:1429:2796] in generation 1 2025-09-25T16:18:00.613268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:00.613571Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:00.613770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-09-25T16:18:00.613782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037894 2025-09-25T16:18:00.613790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037894 2025-09-25T16:18:00.613853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:00.614042Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:00.614059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [1:1442:2797] in generation 1 2025-09-25T16:18:00.615197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:00.615256Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:00.615418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-09-25T16:18:00.615428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037892 2025-09-25T16:18:00.615436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037892 2025-09-25T16:18:00.615486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:00.615504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:00.615516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037892 persisting started state actor id [1:1449:2800] in generation 1 2025-09-25T16:18:00.620277Z node 2 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1417:2398] 2025-09-25T16:18:00.620349Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:00.633629Z node 2 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1430:2399] 2025-09-25T16:18:00.633704Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 202 ... seId : /Root. }. 
Send stats to executor actor [5:1969:2967] TaskId: 1 Stats: CpuTimeUs: 111 Tasks { TaskId: 1 CpuTimeUs: 63 FinishTimeMs: 1758817091236 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 56 HostName: "ghrun-v6cxduzo2m" NodeId: 5 CreateTimeMs: 1758817091236 CurrentWaitOutputTimeUs: 13 UpdateTimeMs: 1758817091236 } MaxMemoryUsage: 1048576 2025-09-25T16:18:11.237006Z node 5 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.237012Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:1484: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll inputs 2025-09-25T16:18:11.237016Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:1499: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll sources 2025-09-25T16:18:11.237021Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:36: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2025-09-25T16:18:11.237025Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:393: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2025-09-25T16:18:11.237029Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:423: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Do not drain channelId: 1, finished 2025-09-25T16:18:11.237038Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-09-25T16:18:11.237100Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:328: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. 
Send TEvStreamData to [5:1654:2967], seqNo: 1, nRows: 1 2025-09-25T16:18:11.237167Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:470: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [5:1972:3150], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 111 Tasks { TaskId: 1 CpuTimeUs: 63 FinishTimeMs: 1758817091236 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 56 HostName: "ghrun-v6cxduzo2m" NodeId: 5 CreateTimeMs: 1758817091236 CurrentWaitOutputTimeUs: 13 UpdateTimeMs: 1758817091236 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:11.237189Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:698: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [5:1972:3150], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 VirtualTimestamp { Step: 2500 TxId: 281474976710664 } Finished: true 2025-09-25T16:18:11.237408Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:451: TxId: 281474976710667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1973:3150] 2025-09-25T16:18:11.237425Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:179: TxId: 281474976710667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-09-25T16:18:11.237434Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:207: TxId: 281474976710667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-09-25T16:18:11.237438Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:220: TxId: 281474976710667, task: 1. Resume compute actor 2025-09-25T16:18:11.237463Z node 5 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.237470Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:1484: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll inputs 2025-09-25T16:18:11.237476Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:1499: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Poll sources 2025-09-25T16:18:11.237482Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:36: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2025-09-25T16:18:11.237486Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:393: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2025-09-25T16:18:11.237490Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:423: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Do not drain channelId: 1, finished 2025-09-25T16:18:11.237496Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710667, task: 1. Tasks execution finished 2025-09-25T16:18:11.237502Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [5:1972:3150], TxId: 281474976710667, task: 1. Ctx: { TraceId : 01k60tnyaz87ngjx93jvdcyhct. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-09-25T16:18:11.237522Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710667, task: 1. pass away 2025-09-25T16:18:11.237543Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-09-25T16:18:11.237585Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-09-25T16:18:11.237632Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:470: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [5:1972:3150], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 377 Tasks { TaskId: 1 CpuTimeUs: 65 FinishTimeMs: 1758817091237 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 56 HostName: "ghrun-v6cxduzo2m" NodeId: 5 CreateTimeMs: 1758817091236 UpdateTimeMs: 1758817091237 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:11.237644Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [5:1972:3150] 2025-09-25T16:18:11.237680Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1208: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-09-25T16:18:11.237686Z node 5 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1222: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. Terminate, become ZombieState 2025-09-25T16:18:11.237694Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:900: ActorId: [5:1969:2967] TxId: 281474976710667. Ctx: { TraceId: 01k60tnyaz87ngjx93jvdcyhct, Database: , SessionId: ydb://session/3?node_id=5&id=ZThiZTJlMDUtODZlYmYyNDAtYmU2NGJhMGUtNDc1YTY5Mjc=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000377s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 158 >> KqpScan::RightJoinSimple >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> KqpScan::UnionWithPureExpr >> KqpScan::PrunePartitionsByLiteral [GOOD] >> KqpScan::PureExpr >> KqpScan::AggregateNoColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 27702, MsgBus: 17623 2025-09-25T16:18:07.426634Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061872549851419:2068];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:07.426912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f7e/r3tmp/tmpn1cD2Y/pdisk_1.dat 2025-09-25T16:18:07.472002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:07.483838Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:07.484182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061872549851388:2081] 1758817087426380 != 1758817087426383 TServer::EnableGrpc on GrpcPort 27702, node 1 2025-09-25T16:18:07.504013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:07.504030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:07.504033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:07.504096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:07.530048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:07.530082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:17623 2025-09-25T16:18:07.530983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17623 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:07.576134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:07.614402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.639142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.664084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.678411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:07.708402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:07.798835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872549853032:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.798863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.798996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872549853042:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.799007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.861885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.874074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.883585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.897417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.911571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.925285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.940545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.962148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.979579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061872549853905:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.979620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.979757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872549853910:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.979770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872549853911:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.979788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:07.980675Z node 1 :FLAT_TX_SCHEMESHARD ... ... 2025-09-25T16:18:10.651976Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:10.658059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:10.859636Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061884628844691:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.859701Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.859797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061884628844701:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.859811Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.868751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.877562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.885863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.899953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.914318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.928218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.941599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.955845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:10.972760Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061884628845563:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.972790Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.972791Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061884628845568:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.972815Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061884628845570:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.972837Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:10.973550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:10.982497Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061884628845571:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:11.074697Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061888923812920:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:11.331466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.341969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.355294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.477317Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"0","PlanNodeId":4,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/SecondaryComplexKeys","Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"1","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","E-Rows":"1","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Fk1","Key"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"TopBy":"row.Fk1","Name":"Top","Limit":"2"}],"Node Type":"Top"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Node Type":"Merge","SortColumns":["Fk1 (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Fk1","Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpSplit::ChoosePartition+Unspecified >> KqpScan::Offset >> KqpSplit::StreamLookupDeliveryProblem [GOOD] >> KqpSplit::StreamLookupJoinDeliveryProblem ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SimpleWindow [GOOD] Test command err: Trying to start YDB, gRPC: 31052, MsgBus: 14553 2025-09-25T16:18:07.801171Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061868621637188:2073];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:07.801260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f74/r3tmp/tmp6RVnQG/pdisk_1.dat 2025-09-25T16:18:07.852935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:07.867440Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31052, node 1 2025-09-25T16:18:07.876534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:07.876548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:07.876550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:07.876587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14553 2025-09-25T16:18:07.906302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:07.906337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:07.908148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:07.942590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:07.983291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:08.005374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:08.006786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:08.038167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:08.052485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:08.244570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872916606095:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.244591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.244632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872916606105:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.244640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.287708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.295037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.302096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.309430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.324115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.338041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.352430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.366764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.381867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061872916606967:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.381899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.381947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872916606972:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.381956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061872916606973:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.381966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.382744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... ssifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29414 TClient is connected to server localhost:29414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:10.827125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:10.836059Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:10.847738Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:10.876024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:10.891218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:10.908410Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:11.175544Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781676193:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.175604Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.180221Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.188384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.200358Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781676391:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.200389Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.200526Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781676397:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.200556Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.201405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.216013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.232862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.244145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.258772Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.271757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.287638Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781677067:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.287674Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.287677Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781677072:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.287727Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061886781677074:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.287744Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.288507Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:11.298929Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061886781677075:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:11.372943Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061886781677128:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:11.716555Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817091759, txId: 281474976715673] shutting down 2025-09-25T16:18:11.746734Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpScan::DecimalColumn [GOOD] >> KqpScan::CustomWindow >> KqpScan::IsNullPartial [GOOD] >> KqpScan::Join >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> THiveTest::TestNotEnoughResources [GOOD] >> THiveTest::TestRestartTablets >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupFailedRead [GOOD] Test command err: Trying to start YDB, gRPC: 4022, MsgBus: 2897 2025-09-25T16:18:07.734551Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061869142846855:2071];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:07.734609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f78/r3tmp/tmpWtbTTx/pdisk_1.dat 2025-09-25T16:18:07.813068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:07.813196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061869142846813:2081] 1758817087733999 != 1758817087734002 2025-09-25T16:18:07.813691Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4022, node 1 2025-09-25T16:18:07.822519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:07.822529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:07.822531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:07.822569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:07.839104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:07.839151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:2897 2025-09-25T16:18:07.840053Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:07.886835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-09-25T16:18:07.901941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.921829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:07.943087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:07.962051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:08.132647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061873437815754:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.132674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.132754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061873437815764:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.132764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.178194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.186761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.197833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.211616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.225490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.240343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.253614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.267510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:08.283814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061873437816626:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.283840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.283851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061873437816631:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.283864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061873437816633:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.283871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:08.284599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at / ... d : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882133Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:328: StreamLookupActor, inputIndex: 0, CA Id [2:3250:4389]Returned 0 bytes, 0 rows, finished: 0 2025-09-25T16:18:11.882195Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:425: StreamLookupActor, inputIndex: 0, CA Id [2:3250:4389]Recv TEvReadResult (stream lookup) from ShardID=72075186224037920, Table = /Root/Table1, ReadId=3 (current ReadId=3), SeqNo=1, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-09-25T16:18:11.882223Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-09-25T16:18:11.882235Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882246Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:328: StreamLookupActor, inputIndex: 0, CA Id [2:3250:4389]Returned 20 bytes, 2 rows, finished: 1 2025-09-25T16:18:11.882301Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-09-25T16:18:11.882313Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-09-25T16:18:11.882324Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715660, task: 3. Finish input channelId: 2, from: [2:3250:4389] 2025-09-25T16:18:11.882342Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882352Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715660, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-09-25T16:18:11.882358Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715660, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2025-09-25T16:18:11.882363Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882408Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-09-25T16:18:11.882415Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3252:4391], TxId: 281474976715660, task: 4. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-09-25T16:18:11.882421Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715660, task: 4. Finish input channelId: 3, from: [2:3251:4390] 2025-09-25T16:18:11.882434Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-09-25T16:18:11.882441Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-09-25T16:18:11.882447Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3252:4391], TxId: 281474976715660, task: 4. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646922 2025-09-25T16:18:11.882463Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [2:3252:4391], TxId: 281474976715660, task: 4. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-09-25T16:18:11.882482Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882488Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715660, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-09-25T16:18:11.882492Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715660, task: 2. Tasks execution finished 2025-09-25T16:18:11.882496Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [2:3250:4389], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-09-25T16:18:11.882572Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715660, task: 2. pass away 2025-09-25T16:18:11.882609Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715660;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-09-25T16:18:11.882675Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882705Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715660, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [1] 2025-09-25T16:18:11.882709Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715660, task: 3. Tasks execution finished 2025-09-25T16:18:11.882713Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [2:3251:4390], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-09-25T16:18:11.882724Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715660, task: 3. pass away 2025-09-25T16:18:11.882733Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715660;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-09-25T16:18:11.882779Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:155: SelfId: [2:3252:4391], TxId: 281474976715660, task: 4. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-09-25T16:18:11.882787Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715660, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [1] 2025-09-25T16:18:11.882791Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715660, task: 4. Tasks execution finished 2025-09-25T16:18:11.882795Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [2:3252:4391], TxId: 281474976715660, task: 4. Ctx: { TraceId : 01k60tnyd82nzezyjv9b5m1mf4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=N2RjODU1MzgtNmZkMGJmNzctOGNmNDFhNGUtMzliZjY1MDc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-09-25T16:18:11.882803Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715660, task: 4. pass away 2025-09-25T16:18:11.882810Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715660;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpScan::CrossJoin >> test.py::test[join-bush_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] >> KqpSplit::AfterResult+Ascending [GOOD] >> KqpSplit::AfterResult+Descending >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> KqpScan::UnionMixed [GOOD] >> KqpScan::UnionSameTable >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] >> KqpSplit::BorderKeys+Ascending >> KqpScan::PureExpr [GOOD] >> KqpScan::RestrictSqlV0 >> test.py::test[produce-process_with_assume--Results] [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> KqpScan::FullFrameWindow >> KqpScan::CustomWindow [GOOD] >> KqpScan::DqSource >> KqpSplit::StreamLookupJoinDeliveryProblemAfterFirstResult >> KqpSplit::StreamLookupJoinSplitBeforeReading >> KqpScan::RightJoinSimple [GOOD] >> KqpScan::RightOnlyJoinSimple >> KqpScan::Join [GOOD] >> KqpScan::Join2 >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> KqpSplit::AfterResultMultiRange+Descending >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveMigration >> KqpScan::AggregateNoColumn [GOOD] >> KqpSplit::StreamLookupJoinDeliveryProblem [GOOD] >> KqpScan::AggregateNoColumnNoRemaps >> KqpSplit::AfterResult+Descending [GOOD] >> KqpScan::Offset [GOOD] >> KqpScan::Order >> KqpScan::UnionWithPureExpr [GOOD] >> KqpScan::UnionThree >> KqpScan::RestrictSqlV0 [GOOD] >> KqpScan::StreamExecuteScanQueryCancelation |81.2%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> KqpScan::UnionSameTable [GOOD] >> KqpSplit::ChoosePartition+Unspecified [GOOD] >> KqpSplit::IntersectionLosesRange+Ascending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 6909, MsgBus: 8277 2025-09-25T16:18:11.449512Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061887956183480:2064];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:11.449534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:11.454035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f62/r3tmp/tmpdoZz2O/pdisk_1.dat 2025-09-25T16:18:11.493805Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:11.494041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061887956183457:2081] 1758817091449358 != 1758817091449361 TServer::EnableGrpc on GrpcPort 6909, node 1 2025-09-25T16:18:11.509017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:11.509033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:11.509036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:11.509079Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8277 2025-09-25T16:18:11.547895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:11.558979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:11.559011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:11.560162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:11.616027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:11.620930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:11.632879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.659484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.690096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.705979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.874134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061887956185100:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.874165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.874235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061887956185110:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.874245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.989554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.005284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.014640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.026335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.042213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.055167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.071849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.084190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.113478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061892251153269:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.113506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061892251153274:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.113511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.113609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061892251153277:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.113627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: ... : 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:13.299296Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:13.309926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.321270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.357956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.378537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.607008Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936448942:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.607151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.611885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.621462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.630325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.643844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.651356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936449251:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.651385Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.651447Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936449253:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.651458Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.657386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.671707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.685994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.700675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.718555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936449824:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.718574Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.718597Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936449830:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.718601Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.718613Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061895936449829:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.719292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:13.726629Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061895936449833:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:13.777870Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061895936449885:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:14.170935Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:14.180803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.201448Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp2hf1ys92tqbr8pgsf7f, Database: , SessionId: ydb://session/3?node_id=2&id=NGFmZTExYTAtMTA1Mjc4ZjYtZTQwOWRlNjYtOWY2YjRhZQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:14.244351Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710675. Ctx: { TraceId: 01k60tp2hw3bvf492e0ahvjw0v, Database: , SessionId: ydb://session/3?node_id=2&id=YjlmYjYxZGQtYzlhZDVjYjQtMTgwMzNiY2EtY2M2MmU5ZWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 16562, MsgBus: 23836 2025-09-25T16:18:11.126015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061886982066348:2253];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:11.126037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:11.131162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f6c/r3tmp/tmpKEbA0o/pdisk_1.dat 2025-09-25T16:18:11.152307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:11.152333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:11.153689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:11.166182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061886982066131:2081] 1758817091123033 != 1758817091123036 2025-09-25T16:18:11.175531Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 16562, node 1 2025-09-25T16:18:11.212954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:11.212968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:11.212971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:11.213085Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:11.312261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23836 TClient is connected to server localhost:23836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:11.383412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-09-25T16:18:11.426860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.465920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:11.524869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.546527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.926331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061886982067772:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.926381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.926507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061886982067782:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.926528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.010163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.024339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.037000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.048102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.061933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.081072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.096308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.117575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.125980Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:12.150548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891277035958:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.150578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.150696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891277035963:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.150707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891277035964:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.150777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issue ... Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:13.519403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:13.700451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.710080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.731135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.738265Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 
2025-09-25T16:18:13.742933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.784704Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061897894808026:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.784743Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.784887Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061897894808036:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.784900Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.800888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.810873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.820087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.836073Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.854561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.871761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.894632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.924907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.973581Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061897894808901:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.973619Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.973795Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061897894808906:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.973818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061897894808907:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.973829Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.974745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:13.984623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:13.984685Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061897894808910:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:14.056924Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061902189776258:3557] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:14.355561Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp2ny0d94ay2m0sr5sem5, Database: , SessionId: ydb://session/3?node_id=2&id=YWJlOWE0MmEtYTU2OGFkYTEtOGZjZjI1YTItMjRkMmMxNTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:14.440738Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:14.752602Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817094398, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RestrictSqlV0 [GOOD] Test command err: Trying to start YDB, gRPC: 12355, MsgBus: 3857 2025-09-25T16:18:11.395221Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061886519675396:2154];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:11.395247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f5d/r3tmp/tmpjOSfrO/pdisk_1.dat 2025-09-25T16:18:11.440697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:11.444023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061886519675269:2081] 1758817091394438 != 1758817091394441 2025-09-25T16:18:11.446543Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12355, node 1 2025-09-25T16:18:11.468758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:11.468773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:11.468775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:11.468838Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:11.498780Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:11.498803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:11.499920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3857 TClient is connected to server localhost:3857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:11.557988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:11.611806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:11.627099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:11.658025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.720513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:11.736629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.873928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061886519676914:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.873952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.874017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061886519676924:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.874022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.942448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.949994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.960624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.971945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.985713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.999320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.013123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.031835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.058142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061890814645085:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.058176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890814645090:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.058188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.058250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890814645093:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.058264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.058958Z node 1 :FLAT_TX_SCHEMESHARD WA ... ptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:14.059301Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.072112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.085775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.111997Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.126779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.413297Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341185983:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.413360Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.417263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.428288Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.442784Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341186168:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.442811Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.442945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341186172:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.442958Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.446446Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.455969Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.476444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.488029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.498262Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.515056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.540084Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341186856:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.540109Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.540201Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341186861:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.540216Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901341186862:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.540221Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.541135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:14.546471Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061901341186865:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:14.631454Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061901341186917:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 }
:1:0: Error: V0 syntax is disabled 2025-09-25T16:18:14.894104Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [3:7554061901341187220:2524], status: GENERIC_ERROR, issues:
:1:0: Error: V0 syntax is disabled 2025-09-25T16:18:14.894220Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=3&id=NjU3ZmQxZTEtNWE2NWUzNmYtOTE5NWRlOGUtYjI3ZGUxYTA=, ActorId: [3:7554061901341187213:2520], ActorState: ExecuteState, TraceId: 01k60tp379985jbzqeaexd2kng, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionSameTable [GOOD] Test command err: Trying to start YDB, gRPC: 27460, MsgBus: 21280 2025-09-25T16:18:12.241103Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061891782461704:2072];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:12.241122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f48/r3tmp/tmpYVgVY0/pdisk_1.dat 2025-09-25T16:18:12.345499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:12.345535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:12.346636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:12.351549Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27460, node 1 2025-09-25T16:18:12.362226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:12.384153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:12.384173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:12.384175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:12.384223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21280 TClient is connected to server localhost:21280 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:12.536294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:12.538231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:12.682371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.725421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.761857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.782711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:12.824789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891782463308:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.824817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.824968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891782463318:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.824983Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.931802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.941375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.958177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.965476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.979110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.992917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.007032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.021394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.041489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061896077431475:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.041517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.041566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061896077431480:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.041573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061896077431481:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.041579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.042650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... 228: got bad distributable configuration TClient is connected to server localhost:24982 TClient is connected to server localhost:24982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.037373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.041378Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.041405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.042235Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:14.044847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.046181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.061639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.085976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.098793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.131066Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.347431Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061899213193265:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347456Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347514Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061899213193342:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347534Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.351255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.360492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.376063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.385653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.400316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.414670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.430334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.446731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.469117Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061899213194137:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.469154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.469330Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061899213194142:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.469340Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061899213194143:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.469346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.470355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:14.480975Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061899213194146:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:14.560852Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061899213194198:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:14.862979Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817094895, txId: 281474976710673] shutting down 2025-09-25T16:18:14.942597Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpScan::DqSource [GOOD] >> KqpSplit::StreamLookupJoinDeliveryProblemAfterFirstResult [GOOD] >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead >> KqpSplit::BorderKeys+Ascending [GOOD] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> KqpScan::RightOnlyJoinSimple [GOOD] >> KqpScan::RightSemiJoinSimple >> KqpScan::Join2 [GOOD] >> KqpScan::Order [GOOD] >> KqpScan::PrunePartitionsByExpr ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 2045, MsgBus: 3518 2025-09-25T16:18:11.602910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061885778025570:2063];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:11.602931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:11.614492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f69/r3tmp/tmphlfYZP/pdisk_1.dat 2025-09-25T16:18:11.686116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:11.686252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:11.694473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:11.712338Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:11.712877Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061885778025547:2081] 1758817091602600 != 1758817091602603 TServer::EnableGrpc on GrpcPort 2045, node 1 2025-09-25T16:18:11.765060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:11.765074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2025-09-25T16:18:11.765077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:11.765118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:11.785873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3518 TClient is connected to server localhost:3518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:11.884126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:11.954462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.976007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.998692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:12.010958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.153214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890072994485:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.153252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.153399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890072994495:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.153416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.215621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.224724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.237449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.259177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.270474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.278445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.293454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.307234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.324583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061890072995362:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.324549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890072995357:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.324603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.324654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061890072995365:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.324661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.325553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operat ... ubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.147254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.165402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.175810Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.175847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.176965Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:14.223301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.246568Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.248457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.265918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.555329Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827176127:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.560962Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.566854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.584152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.605985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.614416Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827176369:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.614518Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.614840Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827176373:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.615011Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.625385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.640171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.655977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.667307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.685265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.721549Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827176999:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.721576Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.721701Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827177004:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.721821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061900827177005:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.721840Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.722728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:14.727956Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061900827177008:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:14.781283Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061900827177060:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:15.062012Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp3c3dzbj7b6rh6cbbwf9, Database: , SessionId: ydb://session/3?node_id=2&id=YmU4OGQ1YjQtNGFmZTgyNS04MjM0MTA1YS0zYzU1OWFmNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-09-25T16:18:15.080114Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; resume evread ----------------------------------------------------------- 2025-09-25T16:18:15.561050Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817095105, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSource [GOOD] Test command err: Trying to start YDB, gRPC: 29743, MsgBus: 17574 2025-09-25T16:18:10.870539Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061884271965852:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:10.872140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f6d/r3tmp/tmpOOkNNf/pdisk_1.dat 2025-09-25T16:18:10.913803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:10.928953Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29743, node 1 2025-09-25T16:18:10.940074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:10.940089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:10.940091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:10.940132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17574 2025-09-25T16:18:10.974775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:10.974802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:10.975732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:10.993444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:11.009524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.033709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.057039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:11.075324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:11.077557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:11.606085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061888566934753:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.606119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.606240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061888566934763:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.606250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.709328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.720775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.732220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.748017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.761513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.774673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.789386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.805058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.826512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061888566935623:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.826570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.826698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061888566935628:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.826717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061888566935629:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.826733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:11.828121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... assifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:14.460977Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22592 TClient is connected to server localhost:22592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.532923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.534518Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.544908Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.562369Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.583664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:14.602430Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.846968Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901110087718:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.846992Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.847142Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061901110087728:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.847151Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.861684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.876706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.937464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.948102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.961418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.973848Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.989719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.003037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.023824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061905405055892:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.023857Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.023869Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061905405055897:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.023917Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061905405055899:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.023937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.025044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:15.028699Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061905405055900:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:15.121681Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061905405055953:3557] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:15.404723Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:15.410549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.535951Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817095581, txId: 281474976710675] shutting down >> KqpSplit::StreamLookupJoinSplitBeforeReading [GOOD] >> KqpSplit::StreamLookupJoinSplitAfterFirstResult >> KqpScan::UnionThree [GOOD] >> KqpScan::CrossJoin [GOOD] >> KqpScan::CrossJoinOneColumn >> KqpSplit::AfterResultMultiRange+Descending [GOOD] >> KqpSplit::AfterResultMultiRange+Unspecified >> TSequenceReboots::CopyTableWithSequence [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join2 [GOOD] Test command err: Trying to start YDB, gRPC: 11510, MsgBus: 31550 2025-09-25T16:18:11.509133Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061887184589485:2142];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:11.509221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f5e/r3tmp/tmpvuft0T/pdisk_1.dat 2025-09-25T16:18:11.584378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:11.596947Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:11.597163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061887184589372:2081] 1758817091506585 != 1758817091506588 TServer::EnableGrpc on GrpcPort 11510, node 1 2025-09-25T16:18:11.617194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:11.617219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:11.618949Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:11.630485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:11.630504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:11.630506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:11.630553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31550 TClient is connected to server localhost:31550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:11.722608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:11.729810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:11.731382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:11.758753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:11.787328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:11.826525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:11.845958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.052489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891479558326:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.052520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.052681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891479558336:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.052713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.145417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.155326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.166722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.180758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.194318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.208921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.222693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.239590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.257899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061891479559198:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.257925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.258013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891479559203:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.258024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061891479559204:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:12.258028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... tep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.900632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.906021Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.914127Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:14.917556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.918106Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.918125Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.919390Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-09-25T16:18:14.950638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.978754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.992361Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.209783Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061906682015793:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.209818Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.209878Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061906682015803:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.209895Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.219097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.232598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.242299Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.253585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.268007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.282141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.299209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.316124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.341938Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061906682016665:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.341975Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.342026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061906682016670:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.342038Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061906682016671:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.342045Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.342992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:15.346922Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061906682016674:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:15.420399Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061906682016726:3549] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } waiting... 2025-09-25T16:18:15.733495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.814508Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:15.859048Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817095896, txId: 281474976710675] shutting down >> KqpScan::AggregateNoColumnNoRemaps [GOOD] >> KqpScan::AggregateWithFunction >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> KqpScan::YqlTableSample >> KqpFlowControl::FlowControl_Unlimited ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionThree [GOOD] Test command err: Trying to start YDB, gRPC: 17094, MsgBus: 31272 2025-09-25T16:18:12.930349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061893290710204:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:12.930377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:12.936184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f4d/r3tmp/tmpr4Vq5Z/pdisk_1.dat 2025-09-25T16:18:12.985398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:12.985432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:12.986163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:13.012124Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:13.012777Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061893290710096:2081] 1758817092929532 != 1758817092929535 TServer::EnableGrpc on GrpcPort 17094, node 1 2025-09-25T16:18:13.048558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-09-25T16:18:13.048572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:13.048574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:13.048616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31272 2025-09-25T16:18:13.138938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:13.226765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:13.255544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.319003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.399129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:13.432749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.982115Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:14.040963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061897585679058:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.041107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.041322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901880646371:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.041340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.061615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.073899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.085110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.099001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.113463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.128965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.142022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.154793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.171147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061901880647235:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.171172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901880647240:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.171176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.171215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901880647243:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.171227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issue ... o.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3457, node 2 2025-09-25T16:18:15.156370Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:15.156381Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:15.156382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:15.156420Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64174 TClient is connected to server localhost:64174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:15.206791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:15.216172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.233549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:15.261504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.276271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.309152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:15.583255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061903677164306:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.583282Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.583400Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061903677164316:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.583407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.597853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.607252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.617675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.631907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.645682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.659752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.675009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.688956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.709723Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061903677165179:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.709767Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061903677165184:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.709769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.709821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061903677165186:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.709828Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.710745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:15.714765Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061903677165187:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:15.806214Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061903677165240:3557] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:16.123642Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817096162, txId: 281474976710673] shutting down 2025-09-25T16:18:16.135866Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink >> KqpSplit::BorderKeys+Descending |81.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpScan::DropRedundantSortByPk |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |81.3%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead [GOOD] >> KqpScan::FullFrameWindow [GOOD] >> KqpScan::EmptySet_2 >> test.py::test[join-lookupjoin_semi_2o-off-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_python--Results] >> KqpScan::PrunePartitionsByExpr [GOOD] |81.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 32549, MsgBus: 4610 2025-09-25T16:18:13.040880Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061894184903122:2257];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:13.040925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f2e/r3tmp/tmp29B84h/pdisk_1.dat 2025-09-25T16:18:13.129192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:13.131363Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32549, node 1 2025-09-25T16:18:13.143833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:13.143864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:13.145253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:13.147183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:13.147188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:13.147190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:13.147238Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4610 TClient is connected to server localhost:4610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:13.273039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:13.335758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.389734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.470367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.491057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.041185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:14.074511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898479871839:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.074535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.074677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898479871849:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.074684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.123570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.133955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.148366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.164439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.175201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.190366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.203851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.217792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.240302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061898479872712:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.240328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.240337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898479872717:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.240365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898479872719:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.240371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.241763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/y ... KqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:15.622818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:15.624595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:15.632601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.644005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.666789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:15.682443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.992138Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061906697104176:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.992165Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.992277Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061906697104186:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.992305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.010780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.020090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.031090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.045320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.059278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.073834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.087918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.102759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.122049Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061910992072344:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.122078Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.122109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061910992072349:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.122126Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061910992072350:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.122139Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.123039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.128254Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061910992072353:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:16.215319Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061910992072405:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:16.498679Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:16.550746Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp4sc72385n2scb3stm7w, Database: , SessionId: ydb://session/3?node_id=2&id=ZDY3NWQxZjAtYjUyNzg0MTItYTdlMTJkMTAtN2UzMDczYjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-09-25T16:18:16.564283Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817096596, txId: 281474976710673] shutting down >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> KqpScan::RightSemiJoinSimple [GOOD] >> test.py::test[produce-reduce_with_python--Results] [SKIPPED] >> test.py::test[produce-reduce_with_trivial_remaps2--Results] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> KqpScan::CountDistinct >> test.py::test[produce-reduce_with_trivial_remaps2--Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead [GOOD] Test command err: Trying to start YDB, gRPC: 19778, MsgBus: 64518 2025-09-25T16:18:14.395084Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061898376564502:2066];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:14.395100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f0a/r3tmp/tmp5NH68B/pdisk_1.dat 2025-09-25T16:18:14.451981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:14.462045Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:14.462625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061898376564476:2081] 1758817094394849 != 1758817094394852 TServer::EnableGrpc on GrpcPort 19778, node 1 2025-09-25T16:18:14.499633Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.499659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.501109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:14.501336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:14.501338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:14.501339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:14.501381Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64518 TClient is connected to server localhost:64518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:14.662038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:14.665025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.729810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.816560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.866870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.894007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.914084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.952727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898376566126:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.952758Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.953112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898376566136:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.953135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.028117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.036059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.045978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.058486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.077487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.090098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.107657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.121002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.145005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061902671534295:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.145041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.145291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902671534300:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.145303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902671534301:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.145309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:16.037818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:16.052196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:16.067808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.091586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.104747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:16.200309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:16.403236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061909888253211:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.403309Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.408008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.408109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061909888253294:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.408135Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.417520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.430242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.445180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.459510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.472943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.486095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.501796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.517551Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061909888254083:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.517588Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.517684Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061909888254088:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.517698Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061909888254089:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.517776Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.518616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.527120Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061909888254092:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:16.597432Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061909888254144:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:16.854297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.877717Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp553ek3bccqjztdzm8a5, Database: , SessionId: ydb://session/3?node_id=2&id=NDU5MzJjZDEtZjVhYjg4NS1lNTg5MTEyZC1lMTk4MjEwNQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:16.922924Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710675. Ctx: { TraceId: 01k60tp55k597pm0hm6snvvpy9, Database: , SessionId: ydb://session/3?node_id=2&id=NTMwY2Y4ZTEtNTFhNDBhMGUtYWZhZjllOTUtMjg5ZDUxYmE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- 2025-09-25T16:18:16.950743Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test.py::test[blocks-date_less--ForceBlocks] [GOOD] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> KqpScan::AggregateWithFunction [GOOD] >> KqpSplit::StreamLookupJoinSplitAfterFirstResult [GOOD] >> test.py::test[blocks-date_less--Results] >> KqpScan::YqlTableSample [GOOD] >> KqpSplit::AfterResolve+Ascending ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 4210, MsgBus: 61920 2025-09-25T16:18:13.945951Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061897672511818:2139];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:13.946003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f17/r3tmp/tmpAA5OrV/pdisk_1.dat 2025-09-25T16:18:13.994612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:14.009590Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:14.013216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061897672511717:2081] 1758817093945325 != 1758817093945328 TServer::EnableGrpc on 
GrpcPort 4210, node 1 2025-09-25T16:18:14.021558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:14.021568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:14.021569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:14.021602Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61920 2025-09-25T16:18:14.052241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.052279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.053392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.083939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.086867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.093801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.123618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:14.154779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.166975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.257147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.350189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901967480656:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.350212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.350332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901967480666:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.350343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.403884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.412468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.430189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.450403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.467219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.482898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.504194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.518827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.559943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061901967481526:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.559994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.564393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901967481531:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.564416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061901967481532:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.564491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Da ... Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:15.812950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:15.822540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.841570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.869756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.882792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:15.909992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:16.166518Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908625266371:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.166545Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.166658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908625266380:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.166679Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.180943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.202415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.217843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.233668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.250501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.270732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.285163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.300794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.327032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061908625267248:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.327064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.327267Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908625267253:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.327278Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908625267254:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.327285Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.328470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.335165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:18:16.335281Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061908625267257:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:16.402111Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061908625267309:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:16.732076Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-09-25T16:18:16.736394Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp4y3cbgjh625gc9c8nv9, Database: , SessionId: ydb://session/3?node_id=2&id=ODI2ZDIwMmUtODVjMzRiYTYtZjY0MDdhMDktNGFhNmY0OWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:17.246081Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817096778, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::PrunePartitionsByExpr [GOOD] Test command err: Trying to start YDB, gRPC: 29991, MsgBus: 19312 2025-09-25T16:18:13.230646Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061894625381269:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:13.231238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f2f/r3tmp/tmpV238ke/pdisk_1.dat 2025-09-25T16:18:13.344892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:13.345705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:13.345761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:13.347122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:13.399754Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29991, node 1 2025-09-25T16:18:13.426567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:13.426578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:13.426580Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:13.426624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19312 2025-09-25T16:18:13.537661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:13.577744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:13.615134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.632668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.652396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:13.663083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.151908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898920350156:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.151978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.152122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898920350166:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.152144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.216409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.224903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.234744Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:14.238479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.254668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.266425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.280846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.294933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.309489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.325215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898920351037:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.325244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.325309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898920351042:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.325321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898920351043:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.325386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.326109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:18 ... on [3:7554061909002167777:2081] 1758817096268367 != 1758817096268370 TServer::EnableGrpc on GrpcPort 9921, node 3 2025-09-25T16:18:16.300698Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:16.300712Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:16.300715Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:16.300764Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28907 TClient is connected to server localhost:28907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:16.357385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:16.359322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:16.367137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.383353Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:16.409176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.422243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.456080Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:16.717764Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909002169423:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.717820Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.717949Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909002169433:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.717974Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.729198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.743927Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.754764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.765936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.780150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.795549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.807913Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.821923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.838718Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061909002170296:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.838745Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.838774Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909002170301:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.838783Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909002170302:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.838791Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.839635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.854902Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061909002170305:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:16.941688Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061909002170357:3555] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:17.194634Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817097233, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RightSemiJoinSimple [GOOD] Test command err: Trying to start YDB, gRPC: 19827, MsgBus: 18578 2025-09-25T16:18:12.505550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061893992973865:2140];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:12.505593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f52/r3tmp/tmp002mej/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19827, node 1 2025-09-25T16:18:12.564680Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:12.564680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:12.565276Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061893992973763:2081] 1758817092504362 != 1758817092504365 2025-09-25T16:18:12.568847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:12.568854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:12.568860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:12.568887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18578 TClient is connected to server localhost:18578 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:12.613059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:12.613087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:12.614148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:12.620431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:12.685254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.725382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:12.764060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.802774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:12.822176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.319360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898287942709:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.319437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.319567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898287942719:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.319583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.333221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.346241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.361636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.374293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.397876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.413110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.441341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.471107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.485127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061898287943582:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.485148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898287943587:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.485151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.485175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061898287943589:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.485181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.485721Z node 1 :FLAT_TX_SCHEMESHARD ... assifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11281 TClient is connected to server localhost:11281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:16.264038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:16.266482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:16.269825Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.299460Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.346433Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.363667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:16.432326Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:16.654646Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909422745327:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.654677Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.654813Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909422745337:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.654831Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.665654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.674424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.691970Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.702943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.720246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.735087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.744921Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.760667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.776388Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061909422746200:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.776419Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.776451Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909422746205:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.776454Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061909422746207:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.776461Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.777494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.786116Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061909422746209:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:16.876154Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061909422746261:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:17.152950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.167144Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:17.264304Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817097310, txId: 281474976710675] shutting down |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveImplTest::BootQueueConfigurePriorities [GOOD] >> THiveTest::TestBlockCreateTablet >> KqpFlowControl::FlowControl_Unlimited [GOOD] >> KqpFlowControl::FlowControl_BigLimit >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> Initializer::Simple [GOOD] >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] >> KqpScan::CrossJoinOneColumn [GOOD] >> KqpScan::CrossJoinCount |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 14630, MsgBus: 21399 2025-09-25T16:18:12.559672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061893004366046:2161];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:12.560054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f3f/r3tmp/tmpIYo5OG/pdisk_1.dat 2025-09-25T16:18:12.632229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:12.663758Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061893004365919:2081] 1758817092543802 != 1758817092543805 2025-09-25T16:18:12.669706Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14630, node 1 2025-09-25T16:18:12.677752Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:12.677795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:12.682434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:12.693021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:12.693035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:12.693037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:12.693081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21399 TClient is connected to server localhost:21399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:12.763485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-09-25T16:18:12.799574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:12.830320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:12.853125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:12.853825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:12.865738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:13.115895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061897299334870:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.116013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.120469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061897299334953:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.120581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.121227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.137539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.148272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.169614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.189494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.209976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.221251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.232214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:13.252684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061897299335742:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.252708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.252789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061897299335747:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.252800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061897299335748:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.252878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:13.253815Z node 1 :FLAT_TX_SCHEMESHARD ... use file: (empty maybe) 2025-09-25T16:18:16.769393Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:16.769396Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:16.769446Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1948 TClient is connected to server localhost:1948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:16.825494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:16.843051Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:16.843088Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:16.844097Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:16.865481Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.877969Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:16.905681Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.920664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.941535Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:17.149567Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061913929264581:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.149640Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.152538Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061913929264655:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.152566Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.153166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.167935Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.179496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.195240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.208633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.220752Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.236452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.250202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.269858Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061913929265444:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.269899Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061913929265449:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.269900Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.270016Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061913929265451:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.270035Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.270847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:17.276115Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061913929265452:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:17.347591Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061913929265505:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:17.714492Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817097667, txId: 281474976710673] shutting down >> KqpScan::EmptySet_2 [GOOD] >> KqpScan::EmptySet_3 >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> KqpScan::StreamExecuteScanQueryCancelation [GOOD] >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinSplitAfterFirstResult [GOOD] Test command err: Trying to start YDB, gRPC: 22691, MsgBus: 21119 2025-09-25T16:18:14.668700Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061899313305142:2183];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:14.669157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f03/r3tmp/tmphTV05K/pdisk_1.dat 2025-09-25T16:18:14.747358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:14.771402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.771426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.772391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:14.788536Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:14.788785Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061899313304985:2081] 1758817094666865 != 1758817094666868 TServer::EnableGrpc on GrpcPort 22691, node 1 2025-09-25T16:18:14.845843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:14.845860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:14.845862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:14.845902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21119 TClient is connected to server localhost:21119 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.978728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.981163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.990243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.013202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.015117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:15.040178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.057211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:15.293481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903608273932:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.293533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.293742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903608273941:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.293764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.358794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.374945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.388660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.401027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.415632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.429468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.446341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.466405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.485539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061903608274801:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.485569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.485700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903608274807:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.485708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903608274806:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.485713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... on { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:16.400939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:16.411879Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.424417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:16.446252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.458590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.512396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:16.741223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908101404488:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.741256Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.743356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908101404498:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.743384Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.748008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.758508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.773325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.789172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.800736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.816959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.828755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.843108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.861620Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061908101405361:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.861657Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.861662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908101405366:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.861686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061908101405368:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.861699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:16.862497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:16.869809Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061908101405370:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:16.963854Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061908101405422:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:17.192362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.218018Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp5fjfjhq5yy8aq8ckve3, Database: , SessionId: ydb://session/3?node_id=2&id=NThlMTA0ZGItM2M4OTZmOWUtYWQ4OWZlMTMtMTZjNzUyZDY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:17.258830Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710675. Ctx: { TraceId: 01k60tp5g56asyescjkjtg1sg2, Database: , SessionId: ydb://session/3?node_id=2&id=ODYwMTk2NzMtZjU3M2EwMjYtNmQ2N2U1NDgtNWU5YWRlNWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:17.324092Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpScan::DropRedundantSortByPk [GOOD] >> KqpScan::DqSourceFullScan >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> KqpSplit::BorderKeys+Descending [GOOD] >> KqpSplit::BorderKeys+Unspecified >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots >> KqpScan::CountDistinct [GOOD] >> KqpScan::BoolFlag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 27659, MsgBus: 28251 2025-09-25T16:18:15.004859Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061903569314010:2259];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:15.004892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003efa/r3tmp/tmph3Koz7/pdisk_1.dat 2025-09-25T16:18:15.041280Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:15.053225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061903569313769:2081] 1758817095002567 != 1758817095002570 2025-09-25T16:18:15.055410Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27659, node 1 2025-09-25T16:18:15.066206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:15.066222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:15.066225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:15.066299Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28251 2025-09-25T16:18:15.109487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:15.109516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:15.110524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:15.145453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:15.153602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:15.174126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.196358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.209987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.276117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:15.433697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903569315414:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.433738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.434275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903569315424:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.434286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.505099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.516661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.527070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.541658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.555083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.570097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.584899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.596746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.614298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061903569316286:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.614334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.614335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903569316291:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.614392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061903569316293:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.614411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.615286Z node 1 :FLAT_TX_SCHEMESHARD ... 5-09-25T16:18:16.806719Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:16.815028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:16.851692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:16.864486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:16.883686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:16.896047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.207188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061914027209971:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.207220Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.207390Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061914027209981:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.207397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.217684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.230990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.242389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.256484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.270210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.284312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.298792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.312510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.329308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061914027210843:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.329346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.329365Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061914027210848:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.329399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061914027210850:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.329421Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.330234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:17.339829Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061914027210852:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:17.417469Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061914027210904:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:17.707613Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp5x8dh130k9qsnp5k673, Database: , SessionId: ydb://session/3?node_id=2&id=ZTdiMzUyMjUtMThmOTJhMjQtNjlkNDQ3NzYtOTEyYzZkYg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-09-25T16:18:17.730224Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:18.163463Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817097751, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-09-25T16:17:14.230596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:17:14.264891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:17:14.267402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:17:14.267481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:17:14.267508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0047a7/r3tmp/tmpJC0KRD/pdisk_1.dat 2025-09-25T16:17:14.340745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:14.340788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:14.355425Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:17:14.356603Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817033749225 != 1758817033749229 2025-09-25T16:17:14.387718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10386, node 1 TClient is connected to server localhost:22158 2025-09-25T16:17:14.536540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:14.536562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:14.536568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:14.536726Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:17:14.537958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:17:14.577707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:24.671023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:685:2563], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:24.671058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:695:2568], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:24.671069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:24.671257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2572], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:24.671266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:24.672242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:24.762460Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:699:2571], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-09-25T16:17:24.783286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:17:24.831418Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:771:2612] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:24.874665Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [1:780:2620], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:17:24.875217Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=1&id=MTIxYjBkZi00OGFkMGE3Ny1kOTM3ZjgxOS0xZTkwYzBlYw==, ActorId: [1:681:2560], ActorState: ExecuteState, TraceId: 01k60tmj5ybtmp6s39ga86mfhw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-09-25T16:17:24.944786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:25.263855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:17:25.377369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:25.728958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished 2025-09-25T16:17:36.222798Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715675. Ctx: { TraceId: 01k60tmxeb7gd5k6c0jnzycw9e, Database: , SessionId: ydb://session/3?node_id=1&id=NzQ5NmFmYzItYjhlZTI5YjYtZWUyOWZjMzMtNWVjMmE0MmQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-09-25T16:17:46.598108Z node 1 :TX_PROXY ERROR: schemereq.cpp:1177: Actor# [1:1337:3020] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-09-25T16:17:46.598197Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1337:3020] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-09-25T16:17:56.972127Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715681. Ctx: { TraceId: 01k60tnhpna7rvp9tazcf8r1n4, Database: , SessionId: ydb://session/3?node_id=1&id=NWEwNjNjNTMtZjI1NGI3YzktNzU2MDNlYzktMTk5MTczNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-09-25T16:18:17.713064Z node 1 :TX_PROXY ERROR: schemereq.cpp:1177: Actor# [1:1504:3137] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-09-25T16:18:17.713120Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1504:3137] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> KqpSplit::AfterResolve+Ascending [GOOD] >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> KqpFlowControl::FlowControl_BigLimit [GOOD] >> KqpFlowControl::FlowControl_SmallLimit >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> KqpScan::EmptySet_3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResolve+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 10646, MsgBus: 20625 2025-09-25T16:18:16.959893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061907665252769:2144];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:16.960044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ee2/r3tmp/tmpRauhGS/pdisk_1.dat 2025-09-25T16:18:17.013860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:17.027413Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:17.027615Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061907665252660:2081] 1758817096959050 != 1758817096959053 TServer::EnableGrpc on GrpcPort 10646, node 1 2025-09-25T16:18:17.042978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:17.042991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:17.042993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:17.043035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20625 2025-09-25T16:18:17.064811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:17.064862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:17.065869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20625 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:17.108185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:17.115263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.133732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.157523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.171443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.288873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:17.386731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061911960221601:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.386769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.386857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061911960221610:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.386872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.451019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.458951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.472110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.479346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.493605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.507903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.521759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.536248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.552912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061911960222473:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.552947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.552977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061911960222478:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.552988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061911960222480:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.553001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.553946Z node 1 :FLAT_TX_SCHEMESHARD ... FIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:18.150585Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23961 TClient is connected to server localhost:23961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:18.197787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:18.207310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.219501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.243572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.255756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:18.286135Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:18.495485Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061916053810036:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.495517Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.495673Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061916053810046:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.495685Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.503355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.511462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.522443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.536546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.550730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.564816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.579226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.593183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.610781Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061916053810909:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.610804Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.610817Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061916053810914:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.610825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061916053810916:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.610831Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.611380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:18.620132Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061916053810918:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:18.673223Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061916053810970:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:18.942172Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tp7543rfxj2qh7yy1yhfz, Database: , SessionId: ydb://session/3?node_id=2&id=ZTliM2ZiMzYtYzNmYTI2MzgtNDk2ZDYxMzQtYjY3MGFhYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-09-25T16:18:18.955267Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817098983, txId: 281474976710673] shutting down >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> KqpScan::DqSourceFullScan [GOOD] >> KqpScan::DqSourceLiteralRange |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex >> KqpScan::CrossJoinCount [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-09-25T16:16:55.780123Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061561126067392:2081];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:16:55.780279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005195/r3tmp/tmpnWtbch/pdisk_1.dat 2025-09-25T16:16:55.866777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:16:55.881715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:16:55.881746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:16:55.883640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:16:55.884036Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4769, node 1 2025-09-25T16:16:55.891410Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:16:55.891426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:16:55.891428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:16:55.891475Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:16:55.918952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:16:56.151454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:16:56.274326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061565421035643:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.274350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.274517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061565421035653:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.274525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.314715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/Dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-09-25T16:16:56.314793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:16:56.314806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-09-25T16:16:56.315136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:16:56.315143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:16:56.321278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2025-09-25T16:16:56.347806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817016397, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:16:56.361619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710658:0 2025-09-25T16:16:56.361654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710658:1 2025-09-25T16:16:56.377287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061565421035884:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.377314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.377458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061565421035887:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.377478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:16:56.378984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:507: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-09-25T16:16:56.379128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:16:56.379136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:16:56.381166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2025-09-25T16:16:56.387850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817016432, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:16:56.390188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-09-25T16:16:56.781867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; partitions 2 Fast forward 1m 2025-09-25T16:17:00.780074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7554061561126067392:2081];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:00.780105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-09-25T16:17:06.444003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-09-25T16:17:06.444177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-09-25T16:17:06.444192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:17:06.461201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Ope ... :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-09-25T16:18:18.644319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710664:0 2025-09-25T16:18:18.644321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710664:0 2025-09-25T16:18:18.644346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-09-25T16:18:18.644593Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037892 parts [ [72075186224037890:1:114:1:12288:9936:0] ] return ack processed 2025-09-25T16:18:18.644617Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-09-25T16:18:18.644632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-09-25T16:18:18.644642Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-09-25T16:18:18.644654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-09-25T16:18:18.644658Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:18.644661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-09-25T16:18:18.644665Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:18.644680Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7554061917608358042:2726], serverId# [1:7554061917608358051:4691], sessionId# [0:0:0] 2025-09-25T16:18:18.644756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554061608370709263 RawX2: 4503603922340172 } TabletId: 72075186224037890 State: 4 2025-09-25T16:18:18.644774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:18:18.644869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554061608370709263 RawX2: 4503603922340172 } TabletId: 72075186224037890 State: 4 2025-09-25T16:18:18.644880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, 
state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:18:18.644994Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:18.645160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554061909018423027 RawX2: 4503603922340486 } TabletId: 72075186224037891 State: 4 2025-09-25T16:18:18.645191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:18:18.645498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:18:18.645508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:18:18.645514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-09-25T16:18:18.645521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:18:18.645524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:18:18.645527Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-09-25T16:18:18.645554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-09-25T16:18:18.645559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:18:18.645566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:18:18.645587Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:18.645627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554061909018423026 RawX2: 4503603922340485 } TabletId: 72075186224037892 State: 4 2025-09-25T16:18:18.645637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:18:18.645948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:18:18.645957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:18:18.646022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-09-25T16:18:18.646895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet 
reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-09-25T16:18:18.646970Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-09-25T16:18:18.646971Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-09-25T16:18:18.647008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-09-25T16:18:18.647030Z node 1 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-09-25T16:18:18.647062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-09-25T16:18:18.647080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-09-25T16:18:18.647096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-09-25T16:18:18.647112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-09-25T16:18:18.647126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-09-25T16:18:18.647148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-09-25T16:18:18.647156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-09-25T16:18:18.647164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-09-25T16:18:18.647179Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-09-25T16:18:18.647199Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037891 2025-09-25T16:18:18.647205Z node 1 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037892 2025-09-25T16:18:18.647229Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037892 2025-09-25T16:18:18.647295Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-09-25T16:18:18.647304Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-09-25T16:18:18.647489Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-09-25T16:18:18.647727Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:257: OnTabletDead: 72075186224037890 2025-09-25T16:18:18.647747Z node 1 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037890 2025-09-25T16:18:18.648219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-09-25T16:18:18.648231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-09-25T16:18:18.648241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-09-25T16:18:18.648245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-09-25T16:18:18.648246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-09-25T16:18:18.648250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-09-25T16:18:18.648252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-09-25T16:18:18.648259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet_3 [GOOD] Test command err: Trying to start YDB, gRPC: 29516, MsgBus: 17273 2025-09-25T16:18:14.440230Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061898605904489:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:14.440268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003f15/r3tmp/tmpk7dto9/pdisk_1.dat 2025-09-25T16:18:14.522196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:14.536844Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:14.537115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061898605904438:2081] 1758817094433004 != 1758817094433007 TServer::EnableGrpc on GrpcPort 29516, node 1 2025-09-25T16:18:14.545536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.545561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.546084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:14.581062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:14.581075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2025-09-25T16:18:14.581076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:14.581120Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17273 TClient is connected to server localhost:17273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.693953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:14.698600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.708477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.746722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.772450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.808727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:14.870933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.169079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902900873396:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.169126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.169446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902900873406:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.169462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.219775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.227959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.239973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.253595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.268093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.283746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.305219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.318216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.335456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061902900874266:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.335488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902900874271:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.335494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.335543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061902900874274:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.335560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... ption { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:18.673432Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:18.683149Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.694533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.716129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.727530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.779941Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:18.968184Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061918215909124:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.968208Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.968313Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061918215909134:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.968326Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.978330Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.987718Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.999082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.013171Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.026847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.041152Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.055956Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.069031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.086267Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061922510877294:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.086296Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.086310Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061922510877299:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.086320Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061922510877301:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.086328Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.087029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:19.095996Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061922510877303:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:19.162814Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061922510877355:3559] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ( (declare $key (DataType 'Uint64)) (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (OptionalType (DataType 'Uint64))) (DataType 'Int32)) (TupleType (OptionalType (OptionalType (DataType 'Uint64))) (DataType 'Int32)))))) (let $1 (DataType 'Uint64)) (let $2 (OptionalType $1)) (let $3 (OptionalType $2)) (let $4 (DataType 'Int32)) (let $5 '('"_partition_mode" '"single")) (let $6 '('('"_logical_id" '986) '('"_id" '"8fb3860e-4716bfe0-1d21613a-e9a34dde") $5)) (let $7 (DqPhyStage '() (lambda '() (block '( (let $28 '((Nothing $3) (Int32 '0))) (let $29 (TupleType $4 $3 $4)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (If (== (Int32 '1) $key) (RangeCreate (AsList '($28 $28))) (List (ListType (TupleType $29 $29))))))))))) ))) $6)) (let $8 (DqCnValue (TDqOutput $7 '0))) (let $9 (KqpPhysicalTx '($7) '($8) '('('"$key")) '('('"type" '"compute")))) (let $10 (KqpTable '"/Root/EightShard" '"72057594046644480:3" '"" '1)) (let $11 '('"Key")) (let $12 '"%kqp%tx_result_binding_0_0") (let $13 (TupleType $3 $4)) (let $14 (TupleType (ListType (TupleType $13 $13)))) (let $15 '('('"UsedKeyColumns" $11) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $16 (KqpRowsSourceSettings $10 $11 '() %kqp%tx_result_binding_0_0 $15)) (let $17 '('('"_logical_id" '1030) '('"_id" '"b8acec75-eb832282-16b7498-96b7a13f") $5)) (let $18 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $16)) (lambda '($30) $30) $17)) (let $19 (ListType (StructType '('"Key" $2)))) (let $20 '('('"_logical_id" '1054) '('"_id" '"3c603fa4-6f95995e-c21a1878-e55a00db") $5)) (let $21 (DqPhyStage '() (lambda '() (Iterator (List $19))) $20)) (let $22 (DqCnResult (TDqOutput $18 '0) $11)) (let $23 (DqCnResult (TDqOutput $21 '0) $11)) (let $24 (KqpTxResultBinding $14 '0 '0)) (let $25 (KqpPhysicalTx '($18 $21) '($22 $23) '('($12 $24)) '('('"type" '"generic")))) (let $26 (KqpTxResultBinding $19 '1 '0)) (let $27 (KqpTxResultBinding $19 '1 '1)) (return (KqpPhysicalQuery '($9 $25) '($26 $27) '('('"type" '"query")))) ) >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> KqpScan::BoolFlag [GOOD] >> KqpScan::Counters >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 12518, MsgBus: 7617 2025-09-25T16:18:13.930308Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061896603719111:2137];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:13.930425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/endf/003f16/r3tmp/tmpMMUyko/pdisk_1.dat 2025-09-25T16:18:13.986338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:13.998515Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:13.998811Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061896603719012:2081] 1758817093929088 != 1758817093929091 TServer::EnableGrpc on GrpcPort 12518, node 1 2025-09-25T16:18:14.013953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:14.013967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:14.013975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:14.014016Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7617 2025-09-25T16:18:14.035430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:14.035455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:14.038600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:14.080487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:14.083115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:14.087902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:14.117499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.162328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.182982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:14.265930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:14.347685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061900898687953:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061900898687963:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.347907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.403415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.420153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.447844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.463598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.472985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.483698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.498593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.515019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:14.533908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061900898688827:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.533934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.534061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061900898688832:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.534075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061900898688833:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:14.534137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... 25-09-25T16:18:18.535659Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:18.535720Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20150 TClient is connected to server localhost:20150 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:18:18.591156Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:18.592499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:18.642014Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.652238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.673251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:18.687223Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.928979Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061917143746986:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.929012Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.931098Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061917143747004:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:18.931119Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.935415Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.945692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.956284Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.971147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.984849Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.999094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.013236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.028126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.046047Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061921438715155:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.046083Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.046089Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061921438715160:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.046116Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061921438715162:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.046123Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.046909Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-09-25T16:18:19.053914Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061921438715164:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:19.106622Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061921438715216:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:19.345850Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.661653Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:19.726896Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817099494, txId: 281474976710675] shutting down >> KqpSplit::BorderKeys+Unspecified [GOOD] >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> YdbYqlClient::DiscoveryLocationOverride >> TGRpcYdbTest::MakeListRemoveDirectory ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CopyTableWithSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:16:37.319467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:37.319498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:37.319504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:37.319509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:37.319517Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:37.319521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:37.319532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:37.319547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:37.319670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:37.319736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:37.341574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:37.341624Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:37.341737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.347506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:37.347635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:37.347679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:37.351096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:37.351234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:37.351371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.351650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:37.352800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.352868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:37.353145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:37.353158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.353178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:37.353187Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:37.353194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:37.353238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:16:37.354888Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.377701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:37.377786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.377841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:37.377848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:37.377892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:37.377906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:37.378679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.378727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:37.378773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.378781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:37.378786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:37.378789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:37.379149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.379158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:37.379163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:16:37.379532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.379545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.379551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:37.379558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:37.380273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:37.380762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:37.380841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:37.381077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.381110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
16.498467Z node 191 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72075186233409546] TTxRestoreSequence.Complete 2025-09-25T16:18:16.498556Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 276299788, Sender [191:359:2339], Recipient [191:140:2161]: NKikimrTxSequenceShard.TEvRestoreSequenceResult Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-09-25T16:18:16.498580Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5275: StateWork, processing event NSequenceShard::TEvSequenceShard::TEvRestoreSequenceResult 2025-09-25T16:18:16.498589Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6634: Handle TEvRestoreSequenceResult, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-09-25T16:18:16.498622Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-09-25T16:18:16.498635Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_sequence.cpp:310: TCopySequence TProposedCopySequence HandleReply TEvRestoreSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2025-09-25T16:18:16.498680Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:3 140 -> 240 2025-09-25T16:18:16.498726Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:18:16.498736Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:704: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 2025-09-25T16:18:16.499550Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-09-25T16:18:16.499571Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:16.499581Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1003:3 2025-09-25T16:18:16.499636Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435072, Sender [191:140:2161], Recipient [191:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-09-25T16:18:16.499643Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-09-25T16:18:16.499676Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-09-25T16:18:16.499686Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-09-25T16:18:16.499711Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:18:16.499716Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:3 progress is 4/4 2025-09-25T16:18:16.499722Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 1003 ready parts: 4/4 2025-09-25T16:18:16.499728Z node 191 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:3 progress is 4/4 2025-09-25T16:18:16.499731Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-09-25T16:18:16.499737Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2025-09-25T16:18:16.499758Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [191:479:2430] message: TxId: 1003 2025-09-25T16:18:16.499767Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-09-25T16:18:16.499779Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:18:16.499785Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:18:16.499836Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-09-25T16:18:16.499842Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-09-25T16:18:16.499848Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:1 2025-09-25T16:18:16.499852Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:1 2025-09-25T16:18:16.499858Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-09-25T16:18:16.499862Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:2 2025-09-25T16:18:16.499866Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:2 2025-09-25T16:18:16.499876Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-09-25T16:18:16.499881Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:18:16.499886Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:3 2025-09-25T16:18:16.499889Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:3 2025-09-25T16:18:16.499895Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:18:16.499899Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-09-25T16:18:16.500574Z node 191 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:16.500602Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [191:479:2430] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-09-25T16:18:16.500670Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:18:16.500678Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [191:681:2602] 2025-09-25T16:18:16.500726Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [191:683:2604], Recipient [191:140:2161]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:16.500737Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:16.500742Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-09-25T16:18:16.500851Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [191:787:2705], Recipient [191:140:2161]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true } 2025-09-25T16:18:16.500858Z node 191 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:18:16.500872Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:18:16.500939Z node 191 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/copy/myseq" took 59us result status StatusSuccess 2025-09-25T16:18:16.501056Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/copy/myseq" PathDescription { Self { Name: "myseq" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 
MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 10 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:16.501604Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Cache# 1 2025-09-25T16:18:16.501631Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] AllocationStart# 2 AllocationCount# 1 AllocationIncrement# 1 2025-09-25T16:18:16.512399Z node 191 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72075186233409546] TTxAllocateSequence.Complete >> PgCatalog::PgTables [GOOD] >> YdbMonitoring::SelfCheck |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> KqpFlowControl::FlowControl_SmallLimit [GOOD] >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> TGRpcYdbTest::DropTableBadRequest >> THiveTest::TestFollowers_LocalNodeOnly >> KqpScan::DqSourceLiteralRange [GOOD] |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 19606, MsgBus: 24314 2025-09-25T16:18:17.057853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061913922642275:2139];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:17.057878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ee1/r3tmp/tmpZ4KOkV/pdisk_1.dat 2025-09-25T16:18:17.118823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:17.118936Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:17.119169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061913922642168:2081] 1758817097056520 != 1758817097056523 TServer::EnableGrpc on GrpcPort 19606, node 1 2025-09-25T16:18:17.132338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:17.132355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:17.132357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:17.132404Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24314 2025-09-25T16:18:17.164769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:17.164799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:17.165707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:17.196348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:17.200509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:17.204561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.226280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.253974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.266339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.291175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:17.495510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061913922643812:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.495540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.495624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061913922643822:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.495634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.550628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.560962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.570494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.584479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.598827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.612660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.668461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.683915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.699306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061913922644689:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.699340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.699358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061913922644694:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:17.699371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061913922644696:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.699391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... t::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:18.982614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:18.992344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.002927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.026158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:19.040643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.138113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:19.354714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061921861770811:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.354763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.354937Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061921861770894:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.355044Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.355768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.363390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.376670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.392253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.404697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.419007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.433050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.447013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:19.463842Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554061921861771683:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.463871Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.463872Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061921861771688:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.463924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554061921861771690:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.463936Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-09-25T16:18:19.464549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-09-25T16:18:19.473893Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554061921861771691:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:19.571201Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554061921861771744:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:19.799015Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715674. Ctx: { TraceId: 01k60tp8072anw2n3ec902p0pk, Database: , SessionId: ydb://session/3?node_id=2&id=ZWY1OTBjNi0xOTJlYmQ2MC1mYjgxOTc3LWQ3ZTE0MTkz, PoolId: default, DatabaseId: /Root}. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-09-25T16:18:19.910282Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:20.304023Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817099844, txId: 281474976715673] shutting down >> YdbOlapStore::LogLast50ByResource >> YdbYqlClient::TestTzTypesFullStack >> KqpScan::Counters [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[aggregate-group_by_session--Results] >> YdbYqlClient::TestDoubleKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpFlowControl::FlowControl_SmallLimit [GOOD] Test command err: Trying to start YDB, gRPC: 14548, MsgBus: 2643 2025-09-25T16:18:16.973334Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061909870327432:2143];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:16.973404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ed8/r3tmp/tmpJ83U41/pdisk_1.dat 2025-09-25T16:18:17.035561Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14548, node 1 2025-09-25T16:18:17.036114Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061909870327315:2081] 1758817096972415 != 1758817096972418 2025-09-25T16:18:17.040262Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:17.042878Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:17.042889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:17.042891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:17.042939Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2643 2025-09-25T16:18:17.076885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:17.076916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:17.077992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:17.106380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:17.116919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.140178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.161820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.182243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.200041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:17.382344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061914165296259:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.382393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.382505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061914165296269:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.382524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.449815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.458557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.472573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.487350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.500969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.514743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.528563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.543466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.559956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061914165297132:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.559982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061914165297137:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.559988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.560031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061914165297140:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.560042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.560799Z node 1 :FLAT_TX_SCHEMESHARD WA ... 2025-09-25T16:18:19.664270Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:19.664320Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1195 TClient is connected to server localhost:1195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:19.715671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:19.725430Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.736655Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.758335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:19.770422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:19.861318Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.069030Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924741534118:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.069065Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.072530Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924741534200:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.072565Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.073674Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.082757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.091232Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.104406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.119533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.132702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.147672Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.161276Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.177545Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061924741534990:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.177579Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.177650Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924741534995:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.177670Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924741534996:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.177677Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.178479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:20.188220Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061924741534999:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:20.255288Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061924741535051:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:20.513030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.638827Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:20.643480Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817100670, txId: 281474976710675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 20135, MsgBus: 29477 2025-09-25T16:17:34.621333Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061728944112373:2162];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:34.621477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00556c/r3tmp/tmpVU5qpm/pdisk_1.dat 2025-09-25T16:17:34.680907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:17:34.693153Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20135, node 1 2025-09-25T16:17:34.712944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:34.712955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:34.712956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:34.712994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29477 2025-09-25T16:17:34.733106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:34.733139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:34.734701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29477 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:17:34.789161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:17:34.791825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 1042 2025-09-25T16:17:34.885266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:17:35.044021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-09-25T16:17:35.071438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061733239080305:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.071466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061733239080313:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.071475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.071670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061733239080320:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.071678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:35.072515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:35.075385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061733239080319:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:17:35.160369Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061733239080372:2400] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:17:35.214240Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:477: Exception while executing KQP transaction [0:281474976715663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-09-25T16:17:35.215556Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-09-25T16:17:35.215661Z node 1 :KQP_EXECUTER ERROR: kqp_data_executer.cpp:912: ActorId: [1:7554061733239080423:2324] TxId: 281474976715663. Ctx: { TraceId: 01k60tmway4fx3hc3n4htd9ngr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDhiZmZjNWUtYTQ0OWMyMGUtMjY5MjNhLTFhMzhiNTg=, PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-09-25T16:17:35.217950Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=1&id=NDhiZmZjNWUtYTQ0OWMyMGUtMjY5MjNhLTFhMzhiNTg=, ActorId: [1:7554061733239080302:2324], ActorState: ExecuteState, TraceId: 01k60tmway4fx3hc3n4htd9ngr, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-09-25T16:17:35.222540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-09-25T16:17:35.291830Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:477: Exception while executing KQP transaction [0:281474976715668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-09-25T16:17:35.292863Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-09-25T16:17:35.292934Z node 1 :KQP_EXECUTER ERROR: kqp_data_executer.cpp:912: ActorId: [1:7554061733239080553:2358] TxId: 281474976715668. Ctx: { TraceId: 01k60tmwge2w3z53qachae1f7z, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE3ZjY1YmQtOGVlMmJkMDAtZDI2MzQwMzQtZjQzMGM1NTc=, PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-09-25T16:17:35.292993 ... info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.934281Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.934380Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061917692065698:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.934391Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.935055Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:18.937121Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061917692065697:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:19.008972Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061921987033046:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 24639, MsgBus: 6542 2025-09-25T16:18:19.281057Z node 14 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7554061920960480959:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:19.281075Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00556c/r3tmp/tmpraGqbV/pdisk_1.dat 2025-09-25T16:18:19.284337Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:19.297813Z node 14 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24639, node 14 2025-09-25T16:18:19.311559Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:19.311574Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:19.311576Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:19.311626Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6542 TClient is connected to server localhost:6542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:19.363860Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:19.382257Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:19.382294Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:19.383326Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:19.537624Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:19.712929Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7554061920960481582:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.712949Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7554061920960481571:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.713013Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.713087Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7554061920960481586:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.713098Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:19.713933Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:19.716120Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7554061920960481585:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:19.815270Z node 14 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [14:7554061920960481638:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:20.082751Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.096141Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.283688Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:20.497925Z node 14 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2025-09-25T16:18:20.501739Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.540379Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [14:7554061925255449463:2420], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01k60tp8pzf2dkenemej61gqrq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=14&id=ZmQ3YTJhNzUtMjMwOTZhY2ItNWUyZmFmOWUtNzRhMjY0Y2E=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-09-25T16:18:20.540502Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1208: SelfId: [14:7554061925255449464:2421], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01k60tp8pzf2dkenemej61gqrq. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=14&id=ZmQ3YTJhNzUtMjMwOTZhY2ItNWUyZmFmOWUtNzRhMjY0Y2E=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [14:7554061925255449460:2417], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-09-25T16:18:20.540632Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=14&id=ZmQ3YTJhNzUtMjMwOTZhY2ItNWUyZmFmOWUtNzRhMjY0Y2E=, ActorId: [14:7554061925255449454:2417], ActorState: ExecuteState, TraceId: 01k60tp8pzf2dkenemej61gqrq, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSourceLiteralRange [GOOD] Test command err: Trying to start YDB, gRPC: 1711, MsgBus: 1776 2025-09-25T16:18:17.166194Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061912937961471:2140];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:17.166284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ed3/r3tmp/tmpOaiaFP/pdisk_1.dat 2025-09-25T16:18:17.208897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:17.220583Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061912937961369:2081] 1758817097165155 != 1758817097165158 2025-09-25T16:18:17.221305Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1711, node 1 2025-09-25T16:18:17.231370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:17.231383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:17.231385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:17.231477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1776 2025-09-25T16:18:17.276144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:17.276190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:17.277983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1776 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:17.294377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:17.304363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.325106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.349390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.361258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.444258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:17.590547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061912937963013:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.590576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.590677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061912937963023:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.590698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.639973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.648558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.662241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.675636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.690207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.704047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.719449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.732373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:17.750872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061912937963885:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.750895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061912937963890:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.750899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.750935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061912937963893:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.750941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:17.751736Z node 1 :FLAT_TX_SCHEMESHARD WARN ... ASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:20.028366Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25091 TClient is connected to server localhost:25091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:20.076444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:20.086207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.099756Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.122303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
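
The stretch of log above repeats two messages many times: the schemeshard warning that an ESchemeOpCreateTable proposal is "undo unsafe", and the [WorkloadService] complaint that the default resource pool is NOT_FOUND. Both recur throughout this run, and the pool warning resolves itself once the later ESchemeOpCreateResourcePool / TPoolCreatorActor records create /Root/.metadata/workload_manager/pools/default. When triaging a capture like this, it can help to count those recurring messages and strip them before looking for real failures. The snippet below is a minimal sketch and not part of the test output: it assumes the capture has been saved to a file with one record per line (ya_test_stderr.log is only a placeholder name) and matches substrings copied verbatim from the lines above.

import collections
import re
import sys

# Two messages that dominate the capture above; both recur throughout this run,
# and the missing default resource pool is created a few records later.
BENIGN = (
    "Resource pool default not found or you don't have access permissions",
    "propose itself is undo unsafe",
)

def summarize(path):
    # Count each benign message; echo any other line mentioning WARN or ERROR.
    counts = collections.Counter()
    with open(path, encoding="utf-8", errors="replace") as log:
        for line in log:
            hit = next((m for m in BENIGN if m in line), None)
            if hit is not None:
                counts[hit] += 1
            elif re.search(r"\b(WARN|ERROR)\b", line):
                print(line.rstrip())
    for message, count in counts.items():
        print(f"[{count}x] {message}")

if __name__ == "__main__":
    summarize(sys.argv[1] if len(sys.argv) > 1 else "ya_test_stderr.log")

Running it against such a capture prints a count for each recurring message plus any remaining WARN/ERROR records, which are the ones worth a closer look.
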
2025-09-25T16:18:20.136936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.204982Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.419453Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061927581276656:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.419492Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.419569Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061927581276666:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.419584Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.431675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.442180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.454965Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.468476Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.484178Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.496657Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.510804Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.525805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.542171Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061927581277528:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.542215Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.542286Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061927581277533:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.542303Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061927581277534:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.542383Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.543202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:20.551651Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061927581277537:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:20.623857Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061927581277589:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:20.860028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.931415Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817100971, txId: 281474976710675] shutting down 2025-09-25T16:18:20.965403Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817101006, txId: 281474976710677] shutting down >> YdbYqlClient::TestColumnOrder >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveDrain >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::GetOperationBadRequest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> YdbMonitoring::SelfCheck [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> THiveTest::TestResetServerlessComputeResourcesMode >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::RetryOperationAsync >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableWithIndex |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> TYqlDateTimeTests::SimpleUpsertSelect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Counters [GOOD] Test command err: Trying to start YDB, gRPC: 17074, MsgBus: 4118 2025-09-25T16:18:17.792247Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061911386583456:2142];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:17.792389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ecb/r3tmp/tmpnBYWY4/pdisk_1.dat 2025-09-25T16:18:17.837368Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:17.848096Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:17.848322Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061911386583347:2081] 1758817097790886 != 1758817097790889 TServer::EnableGrpc on GrpcPort 17074, node 1 2025-09-25T16:18:17.857615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:17.857631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:17.857634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:17.857682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4118 TClient is connected to server localhost:4118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:17.900039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:17.900065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:17.900955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:17.906350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:17.947087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:17.969162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:17.992001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.004111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:18.005862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:18.173640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061915681552307:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.173673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.173779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061915681552317:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.173796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.221039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.229553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.242886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.256612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.270897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.289948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.298956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.314452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:18.329324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061915681553179:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.329344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.329391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061915681553185:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.329398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.329401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061915681553184:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:18.330255Z node 1 :FLAT_TX_SCHEMESHARD WA ... VE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.382620Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14158, node 3 2025-09-25T16:18:20.388262Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:20.388274Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:20.388276Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:20.388326Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29709 TClient is connected to server localhost:29709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:20.443560Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:20.485193Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.498093Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:20.525042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.537629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:20.637858Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.735695Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924997927468:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.735719Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.735780Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924997927477:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.735790Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.749252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.756870Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.770022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.783714Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.798162Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.812673Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.826083Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.841098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.856723Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061924997928340:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.856750Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.856765Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924997928345:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.856773Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061924997928347:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.856779Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:20.857482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:20.866407Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061924997928349:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:20.932128Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061924997928401:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:21.245057Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817101286, txId: 281474976710673] shutting down >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> YdbScripting::MultiResults >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestLockedTabletsMustNotRestart >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> YdbLogStore::LogStore >> YdbYqlClient::DeleteTableWithDeletedIndex >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet |81.3%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeCollector::DeleteNothing >> YdbTableBulkUpsert::Nulls >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> test.py::test[blocks-date_less--Results] [GOOD] >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] >> KqpScan::StreamLookup >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] |81.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> TGRpcYdbTest::CreateYqlSession >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::OperationTimeout >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestAsyncReassign >> YdbYqlClient::BuildInfo >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> YdbOlapStore::ManyTables >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> YdbScripting::MultiResults [GOOD] >> YdbScripting::Params ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> 
TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-09-25T16:18:20.453263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:20.484689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:20.487202Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:20.487285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:20.487315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005a06/r3tmp/tmpeBk3OF/pdisk_1.dat 2025-09-25T16:18:20.550399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:20.550444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.559323Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:20.560103Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817100048990 != 1758817100048994 2025-09-25T16:18:20.591280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:20.636699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.639542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.639989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:20.640317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-09-25T16:18:20.640860Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-09-25T16:18:20.640878Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2405] Proxy marker# C1 2025-09-25T16:18:20.672532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.755740Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-09-25T16:18:20.755778Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-09-25T16:18:20.755896Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-09-25T16:18:20.756025Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 
72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-09-25T16:18:20.756038Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2405] Proxy 2025-09-25T16:18:20.756275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:20.756662Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-09-25T16:18:20.756686Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-09-25T16:18:20.756692Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-09-25T16:18:20.756699Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:6] persistent tx 1 acknowledged 2025-09-25T16:18:20.756927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:18:20.756947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-09-25T16:18:20.757187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-09-25T16:18:20.757806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.758170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.758189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.758366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-09-25T16:18:20.759034Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-09-25T16:18:20.761339Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-09-25T16:18:20.761369Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: 
HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-09-25T16:18:20.761433Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-09-25T16:18:20.761443Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-09-25T16:18:20.761453Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-09-25T16:18:20.761485Z node 1 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-09-25T16:18:20.761590Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-09-25T16:18:20.761622Z node 1 :HIVE DEBUG: hive_impl.cpp:1105: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-09-25T16:18:20.761758Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-09-25T16:18:20.761848Z node 1 :HIVE DEBUG: hive_impl.cpp:458: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2025-09-25T16:18:20.761869Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{20306537092656}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-09-25T16:18:20.761884Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{20306537092656}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-09-25T16:18:20.761909Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{20306537092656}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-09-25T16:18:20.761929Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2025-09-25T16:18:20.761937Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2025-09-25T16:18:20.761947Z node 1 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72057594037968897 ProcessBootQueue (1) 2025-09-25T16:18:20.761981Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-09-25T16:18:20.761988Z node 1 :HIVE DEBUG: hive_impl.cpp:247: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2025-09-25T16:18:20.761996Z node 1 :HIVE DEBUG: hive_impl.cpp:1251: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2025-09-25T16:18:20.762029Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 
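
The Hive debug records above and below trace the tablet lifecycle through explicit VolatileState transitions (Unknown -> Stopped -> Booting -> Starting here, and Running -> Stopped later when the table is dropped). Pulling those transitions out gives a compact per-tablet timeline. The following is a minimal sketch in the same spirit as the earlier one and not part of the test output: it assumes the log is saved to a file (hive_trace.log is only a placeholder) and relies solely on the literal "Tablet(...) VolatileState: A -> B" wording visible in these records.

import re
import sys

# Matches records like:
#   "Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1)"
PATTERN = re.compile(
    r"Tablet\((?P<tablet>[^)]+)\) VolatileState: (?P<old>\w+) -> (?P<new>\w+)"
)

def transitions(path):
    # Yield (tablet, old_state, new_state) for every transition found in the file.
    with open(path, encoding="utf-8", errors="replace") as log:
        for line in log:
            for match in PATTERN.finditer(line):
                yield match.group("tablet"), match.group("old"), match.group("new")

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "hive_trace.log"
    for tablet, old, new in transitions(path):
        print(f"{tablet}: {old} -> {new}")

For the records above it would print, for example, "DataShard.72075186224037888.Leader.0: Booting -> Starting".
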
2025-09-25T16:18:20.762052Z node 1 :HIVE DEBUG: hive_impl.cpp:327: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-09-25T16:18:20.762066Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-09-25T16:18:20.762096Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(DataShard.72075186224037888.Leader.1) to node 1 storage {Version# 1 TabletID# 72075186224037888 TabletType# DataShard Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.450000Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 21 ... reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:23.646809Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2025-09-25T16:18:23.647615Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:18:23.647653Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-09-25T16:18:23.647673Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-09-25T16:18:23.647768Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:23.647787Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:23.658252Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:325:2365] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:397:2396] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-09-25T16:18:23.658574Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-09-25T16:18:23.658934Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888 2025-09-25T16:18:23.658981Z node 2 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037888 2025-09-25T16:18:23.659837Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2025-09-25T16:18:23.659864Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:69: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-09-25T16:18:23.660062Z node 2 :HIVE DEBUG: hive_impl.cpp:922: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-09-25T16:18:23.660123Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046644480 2025-09-25T16:18:23.660246Z node 2 :HIVE DEBUG: hive_impl.cpp:505: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-09-25T16:18:23.660258Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-09-25T16:18:23.660276Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-09-25T16:18:23.670717Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:18:23.671229Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-09-25T16:18:23.671245Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 3500 Status# 16 SEND to# [2:397:2396] Proxy marker# C1 2025-09-25T16:18:23.681604Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-09-25T16:18:23.753311Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2025-09-25T16:18:23.753348Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-09-25T16:18:23.753354Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-09-25T16:18:23.753444Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-09-25T16:18:23.753592Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715665 marker# C2 2025-09-25T16:18:23.753604Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:397:2396] Proxy 2025-09-25T16:18:23.753815Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-09-25T16:18:23.753826Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:23.753863Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:23.753872Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:23.753894Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-09-25T16:18:23.753954Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-09-25T16:18:23.753992Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 
2025-09-25T16:18:23.754022Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:23.754118Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:23.754137Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2025-09-25T16:18:23.754242Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:23.754620Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-09-25T16:18:23.754632Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:23.754810Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:23.754831Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:397:2396], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:23.754844Z node 2 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-09-25T16:18:23.754857Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:23.754877Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-09-25T16:18:23.754897Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:20] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-09-25T16:18:23.754904Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:20] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-09-25T16:18:23.754909Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:20] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-09-25T16:18:23.754914Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:20] persistent tx 281474976715665 acknowledged 2025-09-25T16:18:23.755119Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-09-25T16:18:23.755822Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715665:0 2025-09-25T16:18:23.755848Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1 2025-09-25T16:18:23.755972Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-09-25T16:18:23.755986Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got 
TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:18:23.756097Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:23.756127Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-09-25T16:18:23.767746Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-09-25T16:18:23.767836Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-09-25T16:18:23.768245Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:23.768455Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-09-25T16:18:23.768545Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2025-09-25T16:18:23.768558Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-09-25T16:18:23.768579Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-09-25T16:18:23.768604Z node 2 :HIVE DEBUG: tablet_info.cpp:522: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-09-25T16:18:23.768625Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[produce-process_with_assume--Results] [GOOD] >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter >> THiveTest::TestAsyncReassign [GOOD] >> THiveTest::TestAlterFollower >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> TPDiskRaces::OwnerKilledWhileReadingLog [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::DatetimeKey >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> YdbYqlClient::CopyTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] Test command err: 2025-09-25T16:18:06.711064Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:06.716568Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 
Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:06.716665Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:06.716935Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:06.717018Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:06.717198Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:06.717207Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:06.717335Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2025-09-25T16:18:06.717340Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:06.717365Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:06.717389Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:06.720062Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:06.720078Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:06.720387Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720410Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720430Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720448Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720468Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720486Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720505Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.720509Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:06.720519Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 
2025-09-25T16:18:06.720523Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2025-09-25T16:18:06.720529Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:06.720535Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:06.720698Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:06.720711Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:06.721834Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:06.721882Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:06.721994Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:06.722065Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:06.722192Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2025-09-25T16:18:06.722196Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:06.722209Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:06.722233Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:06.723187Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:06.724350Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:06.724365Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:06.724613Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724634Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724652Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724670Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724687Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724705Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] 
targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724723Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.724726Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:06.724737Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2025-09-25T16:18:06.724741Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2025-09-25T16:18:06.724747Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:06.724752Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:06.724810Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:06.724961Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2025-09-25T16:18:06.724978Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:06.725033Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:06.725060Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:06.725170Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2025-09-25T16:18:06.725181Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:06.725187Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:06.725208Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:06.727858Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:06.727873Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:06.728651Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:06.728701Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:06.729065Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:06.729087Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:06.729132Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:06.729141Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:06.729158Z node 2 
:BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:06.729188Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:06.729803Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:06.729822Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:06.729832Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:06.729837Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:06.729881Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:100:2090] 2025-09-25T16:18:06.729953Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup ... Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2025-09-25T16:18:23.494936Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [4d73cb573d4ef239] Result# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2025-09-25T16:18:23.494943Z node 27 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [4d73cb573d4ef239] SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:23.494961Z node 27 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 2147483649 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.109 sample PartId# [72075186224037888:2:1:1:28672:89:1] QueryCount# 1 VDiskId# [80000001:1:0:0:0] NodeId# 26 } TEvVPutResult{ TimestampMs# 1.278 VDiskId# [80000001:1:0:0:0] NodeId# 26 Status# OK } ] } 2025-09-25T16:18:23.494982Z node 27 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:23.495004Z node 27 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72075186224037888 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-09-25T16:18:23.495018Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:2} commited cookie 2 for step 1 2025-09-25T16:18:23.495047Z node 27 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [758c562ff377dcdd] bootstrap ActorId# [27:549:2210] Group# 2147483648 TabletId# 72075186224037888 Channel# 0 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2025-09-25T16:18:23.495061Z node 27 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [27:492:2163] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:0] collect=[2:0] cookie# 0 2025-09-25T16:18:23.495092Z node 27 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [5be63a1c46f51038] bootstrap ActorId# [27:550:2211] Group# 2147483649 TabletId# 72075186224037888 Channel# 1 
RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2025-09-25T16:18:23.495098Z node 27 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:182: [5be63a1c46f51038] Keep# [72075186224037888:1:2:1:8192:289:0] Marker# DSPC04 2025-09-25T16:18:23.495108Z node 27 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [27:511:2179] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:1] collect=[2:0] Keep: [72075186224037888:1:2:1:8192:289:0] cookie# 0 2025-09-25T16:18:23.495514Z node 27 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [758c562ff377dcdd] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:23.495528Z node 27 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [758c562ff377dcdd] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-09-25T16:18:23.495910Z node 27 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [5be63a1c46f51038] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:23.495920Z node 27 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [5be63a1c46f51038] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-09-25T16:18:23.495981Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [26:551:2319] 2025-09-25T16:18:23.495985Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [26:551:2319] 2025-09-25T16:18:23.495993Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [26:551:2319] 2025-09-25T16:18:23.496003Z node 26 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [26:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:23.496009Z node 26 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 26 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [26:333:2202] 2025-09-25T16:18:23.496017Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [26:551:2319] 2025-09-25T16:18:23.496024Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [26:551:2319] 2025-09-25T16:18:23.496029Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [26:551:2319] 2025-09-25T16:18:23.496041Z node 26 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [26:551:2319] 2025-09-25T16:18:23.496056Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [26:551:2319] 2025-09-25T16:18:23.496062Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [26:551:2319] 
2025-09-25T16:18:23.496065Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [26:551:2319] 2025-09-25T16:18:23.496069Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [26:551:2319] 2025-09-25T16:18:23.496072Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [26:551:2319] 2025-09-25T16:18:23.496080Z node 26 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [26:465:2299] EventType# 268959750 2025-09-25T16:18:23.496101Z node 26 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([26:551:2319]) [26:552:2320] 2025-09-25T16:18:23.496112Z node 26 :HIVE DEBUG: hive_impl.cpp:767: HIVE#72057594037927937 THive::Handle::TEvSyncTablets 2025-09-25T16:18:23.496126Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxSyncTablets} queued, type NKikimr::NHive::TTxSyncTablets 2025-09-25T16:18:23.496133Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxSyncTablets} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:18:23.496140Z node 26 :HIVE DEBUG: tx__sync_tablets.cpp:41: HIVE#72057594037927937 THive::TTxSyncTablets([26:465:2299])::Execute 2025-09-25T16:18:23.496148Z node 26 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72057594037927937 ProcessBootQueue (0) 2025-09-25T16:18:23.496152Z node 26 :HIVE TRACE: hive_impl.cpp:369: HIVE#72057594037927937 ProcessBootQueue - sending 2025-09-25T16:18:23.496162Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxSyncTablets} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-09-25T16:18:23.496169Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxSyncTablets} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:23.496188Z node 26 :HIVE TRACE: hive_impl.cpp:353: HIVE#72057594037927937 ProcessBootQueue - executing 2025-09-25T16:18:23.496196Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{33, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-09-25T16:18:23.496201Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{33, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:18:23.496206Z node 26 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2025-09-25T16:18:23.496211Z node 26 :HIVE DEBUG: hive_impl.cpp:247: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2025-09-25T16:18:23.496218Z node 26 :HIVE DEBUG: hive_impl.cpp:327: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-09-25T16:18:23.496223Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{33, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-09-25T16:18:23.496226Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{33, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:23.496273Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [26:554:2322] 2025-09-25T16:18:23.496278Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [26:554:2322] 
2025-09-25T16:18:23.496287Z node 26 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [26:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:23.496296Z node 26 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 26 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [26:333:2202] 2025-09-25T16:18:23.496304Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [26:554:2322] 2025-09-25T16:18:23.496312Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [26:554:2322] 2025-09-25T16:18:23.496320Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [26:554:2322] 2025-09-25T16:18:23.496326Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [26:554:2322] 2025-09-25T16:18:23.496336Z node 26 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [26:554:2322] 2025-09-25T16:18:23.496353Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [26:554:2322] 2025-09-25T16:18:23.496357Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [26:554:2322] 2025-09-25T16:18:23.496360Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [26:554:2322] 2025-09-25T16:18:23.496363Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [26:554:2322] 2025-09-25T16:18:23.496366Z node 26 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [26:554:2322] 2025-09-25T16:18:23.496371Z node 26 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [26:553:2321] EventType# 268697616 2025-09-25T16:18:23.496375Z node 26 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([26:554:2322]) [26:555:2323] 2025-09-25T16:18:23.496387Z node 26 :HIVE TRACE: hive_impl.cpp:1990: HIVE#72057594037927937 Handle TEvRequestHiveInfo >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndex >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery >> KqpScan::StreamLookup [GOOD] |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[produce-reduce_with_trivial_remaps2--Results] [SKIPPED] >> YdbTableBulkUpsert::Simple >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> 
YdbYqlClient::CheckDefaultTableSettings2 >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-09-25T16:18:20.424392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:20.485023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:20.487639Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:20.487726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:20.487752Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0036d7/r3tmp/tmpbPGBuD/pdisk_1.dat 2025-09-25T16:18:20.554078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:20.554110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.563469Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:20.564299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817099977954 != 1758817099977958 2025-09-25T16:18:20.595443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:20.646316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:20.679843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.764192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.779812Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:18:20.779873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:20.788773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:20.788840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:20.789013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:18:20.789024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:18:20.789033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:18:20.789091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:20.789171Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2575] 2025-09-25T16:18:20.789211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:20.790631Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:20.790652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:711:2572] in generation 1 2025-09-25T16:18:20.790819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:20.790838Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:20.790969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:18:20.790977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:18:20.790983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:18:20.791018Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:20.791033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:20.791040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2575] in generation 1 2025-09-25T16:18:20.801377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:20.806942Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:18:20.807029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:20.807058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:18:20.807064Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:20.807068Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:18:20.807074Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:20.807213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:20.807223Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:18:20.807237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:20.807248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:18:20.807252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:18:20.807256Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:18:20.807260Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-09-25T16:18:20.807367Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:18:20.807393Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:18:20.807435Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:20.807442Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:20.807451Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:18:20.807457Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:20.807464Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:18:20.807476Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-09-25T16:18:20.807597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:685:2573], sessionId# [0:0:0] 2025-09-25T16:18:20.807607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:20.807611Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:20.807617Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-09-25T16:18:20.807623Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:20.807651Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:18:20.807709Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:18:20.807732Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:18:20.807820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [1:677:2569], serverId# [1:689:2576], sessionId# [0:0:0] 2025-09-25T16:18:20.807862Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:18:20.807902Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:18:20.807915Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-09-25T16:18:20.808271Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:20.808289Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:20.818653Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:18:20.818691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:18:20.818868Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:18:20.818877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-09-25T16:18:20.952784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, cl ... 186224037888 step# 1000} 2025-09-25T16:18:25.236172Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:25.236386Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:18:25.236394Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:25.236549Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:25.236558Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:25.236562Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:18:25.236574Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:25.236581Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:18:25.236591Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:25.236746Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-09-25T16:18:25.236754Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-09-25T16:18:25.236895Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:25.236914Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:25.236928Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-09-25T16:18:25.236936Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-09-25T16:18:25.237064Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-09-25T16:18:25.237131Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-09-25T16:18:25.237138Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037890 2025-09-25T16:18:25.237143Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037890 2025-09-25T16:18:25.237153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:25.237160Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:18:25.237170Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-09-25T16:18:25.237898Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:25.238005Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:25.238017Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-09-25T16:18:25.238024Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:18:25.238134Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:18:25.238141Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:18:25.238159Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:25.238249Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-09-25T16:18:25.238256Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-09-25T16:18:25.240024Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:834:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.240044Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:844:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.240055Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.240210Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:849:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.240231Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.241097Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:25.242141Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:25.242163Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:25.242174Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-09-25T16:18:25.284059Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:25.379396Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:25.379442Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:25.379458Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-09-25T16:18:25.380130Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:848:2691], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:25.411859Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:922:2734] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:25.425455Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tpdaq53nqhqahvqn9qymx, Database: , SessionId: ydb://session/3?node_id=4&id=NjljZDE1NS01NTkzYjMyMy0yNDRlODFiMy0zZWM1NWMy, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:25.425927Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1027:2777], serverId# [4:1028:2778], sessionId# [0:0:0] 2025-09-25T16:18:25.426023Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-09-25T16:18:25.426080Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817105426057 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:18:25.426100Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817105426057 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:18:25.426113Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-09-25T16:18:25.436514Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:18:25.436539Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:25.437711Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1034:2783], serverId# [4:1035:2784], sessionId# [0:0:0] 2025-09-25T16:18:25.438720Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:1036:2785], serverId# [4:1037:2786], sessionId# [0:0:0] >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> TNodeBrokerTest::ShiftIdRangeRemoveNew >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookup [GOOD] Test command 
err: Trying to start YDB, gRPC: 9315, MsgBus: 30568 2025-09-25T16:18:15.319910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061906961966695:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:15.320103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:15.323279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ef7/r3tmp/tmpdkz1Nz/pdisk_1.dat 2025-09-25T16:18:15.353763Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:15.353983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554061906961966648:2081] 1758817095311418 != 1758817095311421 TServer::EnableGrpc on GrpcPort 9315, node 1 2025-09-25T16:18:15.370574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:15.370587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:15.370589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:15.370635Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30568 2025-09-25T16:18:15.420278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:15.420317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:15.421282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:15.429214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:15.453847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:15.456944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:15.470135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.501445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:15.528196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:15.545400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.760987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061906961968289:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.761018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.761444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061906961968299:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.761461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.807904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.820144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.828097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.844475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.858564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.871897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.885063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.898416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:15.923925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554061906961969160:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.923952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.923994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061906961969165:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.924007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061906961969167:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:15.924015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, stat ... GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.089380Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.089415Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.089730Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:24.090426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-09-25T16:18:24.091345Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:24.093732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:24.108968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:24.134768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.152243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:24.244181Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.441341Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061943658069119:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.441425Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.441561Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061943658069129:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.441572Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.453967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.463739Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.473495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.487197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.510671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.531216Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.546575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.564335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.589872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7554061943658069992:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.589896Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.590052Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061943658069997:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.590059Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554061943658069998:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.590076Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.591077Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:24.594930Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-09-25T16:18:24.595014Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554061943658070001:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:24.684751Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554061943658070053:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:24.962823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:24.985830Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:25.084052Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817105122, txId: 281474976715676] shutting down >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning |81.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogNonExistingRequest >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate >> TNodeBrokerTest::NodesMigrationRemovedChanged >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbYqlClient::TestReadWrongTable [GOOD] >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> TNodeBrokerTest::NodesMigrationExpireRemoved >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> THiveTest::TestAlterFollower [GOOD] >> THiveTest::TestBootProgress >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> TGRpcYdbTest::KeepAlive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-09-25T16:18:21.666902Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061930150245451:2151];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.666933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a5/r3tmp/tmp6jxIVI/pdisk_1.dat 2025-09-25T16:18:21.724249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.737921Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11241, node 1 2025-09-25T16:18:21.753003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.753016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.753018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.753065Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:21.768153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.768185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.769835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.794359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:21.952608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:22.066279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061934445213638:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.066352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061934445213628:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.066359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.066388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061934445213644:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.066394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.066974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:22.072841Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061934445213642:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:22.138762Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061934445213719:2678] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:23.011580Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061939156419967:2256];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.011665Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:23.053517Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a5/r3tmp/tmp320BQD/pdisk_1.dat 2025-09-25T16:18:23.090352Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16167, node 4 2025-09-25T16:18:23.097890Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.097905Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.097907Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.097947Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:23.113156Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.113192Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26054 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:18:23.116468Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:23.152771Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.309261Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.427717Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061939156420738:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.427735Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061939156420746:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.427742Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.427910Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061939156420753:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.427926Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.428522Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... ompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:24.388370Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.388399Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.394595Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.524878Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.848363Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061942795573915:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.848388Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.848524Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061942795573934:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.848536Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.850101Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.935064Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061942795574080:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.935088Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.935091Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061942795574085:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.935147Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061942795574087:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.935151Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.935874Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:24.948373Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554061942795574088:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-09-25T16:18:25.026637Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554061947090541456:2782] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:25.040534Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710661. Ctx: { TraceId: 01k60tpd162840tfdaahxe5618, Database: , SessionId: ydb://session/3?node_id=7&id=ODkyY2JhNTItYmU1ZjAwY2EtZjY0NjczY2MtZWY5MGU5M2E=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:25.055051Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tpd4n12a2jregqw7kc84g, Database: , SessionId: ydb://session/3?node_id=7&id=NmU4MzBkNjEtN2FmNzliOGUtMjMxYzg5ODYtODAyZDI4NTg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:25.069684Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=ODkyY2JhNTItYmU1ZjAwY2EtZjY0NjczY2MtZWY5MGU5M2E=, ActorId: [7:7554061942795573896:2319], ActorState: ExecuteState, TraceId: 01k60tpd52cpgs49z9q919n9dh, Create QueryResponse for error on request, msg: 2025-09-25T16:18:25.943113Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061946209271349:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.943139Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.946399Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a5/r3tmp/tmp8em0xL/pdisk_1.dat 2025-09-25T16:18:25.979232Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25730, node 10 2025-09-25T16:18:26.007843Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.007859Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.007861Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.007920Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3883 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:18:26.046311Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.046346Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-09-25T16:18:26.047937Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.048676Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:26.101620Z node 10 :GRPC_SERVER INFO: grpc_request_proxy.cpp:592: Got grpc request# ListEndpointsRequest, traceId# 01k60tpe5ndj4bwsxjvd77k1z2, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49174, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.999213s 2025-09-25T16:18:26.109210Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# CreateSessionRequest, traceId# 01k60tpe5x7s6hmzb94d6fwnvx, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49174, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:18:26.202469Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:26.394385Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# ReadTableRequest, traceId# 01k60tpeetbjj35yrybbn72azx, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv4:127.0.0.1:33344, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:18:26.394834Z node 10 :TX_PROXY ERROR: read_table_impl.cpp:567: [ReadTable [10:7554061950504239603:2321] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-09-25T16:18:26.394870Z node 10 :TX_PROXY ERROR: read_table_impl.cpp:2919: [ReadTable [10:7554061950504239603:2321] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-09-25T16:18:26.395025Z node 10 :READ_TABLE_API NOTICE: rpc_read_table.cpp:531: [10:7554061950504239602:2321] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-09-25T16:18:26.395956Z node 10 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# DeleteSessionRequest, traceId# 01k60tpeev1rm4fdzk9d1r9gd7, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49174, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.008873s >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> THiveTest::TestBootProgress [GOOD] >> THiveTest::TestBridgeCreateTablet >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove >> TNodeBrokerTest::FixedNodeId >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> YdbYqlClient::TestConstraintViolation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-09-25T16:18:21.077809Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061929843716501:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.077835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041ae/r3tmp/tmpmFMSfL/pdisk_1.dat 2025-09-25T16:18:21.132650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.146779Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30022, node 1 2025-09-25T16:18:21.164497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.164510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.164511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.164551Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:21.180189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.180229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.181711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26157 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.198426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:21.228976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-09-25T16:18:22.899029Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061935989985787:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:22.899060Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:22.976944Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041ae/r3tmp/tmpBF8nKf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6847, node 4 2025-09-25T16:18:23.097922Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:23.117099Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.117114Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.117116Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.117179Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15241 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:23.145445Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.208493Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.208524Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.213791Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.259985Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.086901Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061945431024366:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.086927Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:24.132581Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041ae/r3tmp/tmp9icguI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19743, node 7 2025-09-25T16:18:24.170249Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:24.174439Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.174454Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.174468Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.174514Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient 
is connected to server localhost:6119 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:18:24.188602Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.188638Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.190204Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.235682Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting...
: Error: Operation timeout. 2025-09-25T16:18:24.376703Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:25.204009Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061947831060524:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.204035Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.213490Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041ae/r3tmp/tmpeBTlSa/pdisk_1.dat 2025-09-25T16:18:25.248491Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14346, node 10 2025-09-25T16:18:25.305878Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.305910Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.309281Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.368322Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:25.381054Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.381069Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.381071Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.381128Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:25.429226Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:25.437131Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
: Error: Operation cancelled. 2025-09-25T16:18:26.380306Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061953676347105:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:26.380326Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041ae/r3tmp/tmp7SVoiV/pdisk_1.dat 2025-09-25T16:18:26.397431Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:26.436710Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19590, node 13 2025-09-25T16:18:26.482442Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.482475Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.483972Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:26.519279Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.519293Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.519295Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.519358Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.554130Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:26.557463Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> THiveTest::TestStopTenant [GOOD] >> THiveTest::TestTabletAvailability >> TLocalTests::TestAlterTenant >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Overload ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-09-25T16:18:21.233077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061928675771297:2081];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.233232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:21.247800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041aa/r3tmp/tmp6o9vx9/pdisk_1.dat 2025-09-25T16:18:21.285585Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22599, node 1 2025-09-25T16:18:21.302650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.302662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.302664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.302745Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8742 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:18:21.335439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.335474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.337058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:21.353187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.363509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:21.442682Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061928675772209:2613] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:18:22.372534Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061935839053891:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:22.372861Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:22.441122Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041aa/r3tmp/tmpD9AvNP/pdisk_1.dat 2025-09-25T16:18:22.590449Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17076, node 4 2025-09-25T16:18:22.651526Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:22.698857Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:22.698890Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:22.700491Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:22.825080Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:22.825098Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:22.825100Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:22.825163Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:22.963726Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.058997Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:23.113210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:23.165255Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061940134022566:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.165277Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061940134022555:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.165351Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.165526Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061940134022570:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.165548Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.166236Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:23.173658Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7554061940134022569:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-09-25T16:18:23.242708Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061940134022652:3012] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:23.355592Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpb9s7qgq1pxektfzxvpb, Database: , SessionId: ydb://session/3?node_id=4&id=ZmNiMTQ2YjctNzhjNGJkNzctYjUxM2IyNTMtMjZlZDVmODg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:23.357687Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k60tpb9s7qgq1pxektfzxvpb, Database: , SessionId: ydb://session/3?node_id=4&id=ZmNiMTQ2YjctNzhjNGJkNzctYjUxM2IyNTMtMjZlZDVmODg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-0 ... ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:25.731261Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:26.100816Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061949929935301:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.100816Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061949929935290:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.100843Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.100894Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061949929935304:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.100899Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.101727Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:26.109646Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554061949929935305:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:26.162718Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061949929935381:2671] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:26.189748Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [10:7554061949929935410:2334], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:18:26.189837Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=10&id=MzhmNDczNjEtZjI5N2U4NGQtOWU0NGFjZDgtNDE1ODdlOWY=, ActorId: [10:7554061949929935268:2313], ActorState: ExecuteState, TraceId: 01k60tpe8982mv2kpkvmewbv6e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-09-25T16:18:26.843520Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061951574326708:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:26.843548Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:26.880328Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041aa/r3tmp/tmpQ1z7au/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17172, node 13 2025-09-25T16:18:26.926004Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:26.945315Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.945347Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.948639Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:26.955406Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.955418Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.955420Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.955494Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:27.014717Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:27.033091Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:27.053672Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:27.092975Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:27.351112Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061955869295121:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.351146Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061955869295129:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.351154Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.352008Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:27.366024Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061955869295135:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-09-25T16:18:27.444448Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061955869295224:2879] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:27.460903Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpfcp1xz11c5fp2zfhnzg, Database: , SessionId: ydb://session/3?node_id=13&id=M2FkYTVhNDItYzJlN2E3ZmQtZTQxZDQxMzQtN2FjYTFjOGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:27.495410Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k60tpfgmf25j4gyqx6v8gapz, Database: , SessionId: ydb://session/3?node_id=13&id=M2FkYTVhNDItYzJlN2E3ZmQtZTQxZDQxMzQtN2FjYTFjOGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root >> THiveTest::TestTabletAvailability [GOOD] >> THiveTest::TestSetDomain >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> test.py::test[blocks-div_uint64_opt2--ForceBlocks] [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-09-25T16:18:21.984672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061931255056152:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.984696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419c/r3tmp/tmpdNqnjY/pdisk_1.dat 2025-09-25T16:18:22.058078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:22.079070Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:22.086695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:22.086730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:22.093191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2323, node 1 2025-09-25T16:18:22.240144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:22.248360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-09-25T16:18:22.248372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:22.248374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:22.248430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:22.283422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:22.768005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061935550024412:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.768029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.768159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061935550024422:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.768167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.824014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:22.953311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061935550024581:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.953337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.953406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061935550024586:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.953418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061935550024587:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.953436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.954377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:22.958282Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061935550024590:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:22.987074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:23.019427Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061939844991974:2809] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:23.052694Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpb3834xbr5hv0c14d2by, Database: , SessionId: ydb://session/3?node_id=1&id=OGNkMTdmMTItMzk3ZTk3OTItMWI0MzA2NjUtOWFlYzczMTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:23.091218Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpb6jfyxdaxyndacw5qfe, Database: , SessionId: ydb://session/3?node_id=1&id=OGNkMTdmMTItMzk3ZTk3OTItMWI0MzA2NjUtOWFlYzczMTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.018306Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061942607373599:2080];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.018570Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:24.028589Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419c/r3tmp/tmpMfslXK/pdisk_1.dat 2025-09-25T16:18:24.071288Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65263, node 4 2025-09-25T16:18:24.087988Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.088000Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.088001Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.088039Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5082 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.118292Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.118334Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.119893Z node 4 :HIVE WARN ... 86585:2683] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:27.034355Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061958111576144:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.034399Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419c/r3tmp/tmpFDPBMl/pdisk_1.dat 2025-09-25T16:18:27.041664Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:27.060481Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18446, node 10 2025-09-25T16:18:27.081789Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.081800Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.081802Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.081848Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8347 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:27.136530Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.136564Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.138123Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.138401Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:27.324884Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:27.434265Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061958111577083:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.434290Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.434462Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061958111577102:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.434471Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.435591Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:27.456637Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061958111577247:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.456670Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.456762Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061958111577252:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.456779Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061958111577253:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.456791Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.457456Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:27.461323Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554061958111577256:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:27.546922Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061958111577325:2778] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:27.560423Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpfg09vmz0jwzb294m7xy, Database: , SessionId: ydb://session/3?node_id=10&id=M2M3MDBkYTYtNGFiMDA5NzQtZmIyZmZhNTAtOTI0MjgzODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:27.575727Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpfkcdc2v9q04n3d3kvbs, Database: , SessionId: ydb://session/3?node_id=10&id=M2M3MDBkYTYtNGFiMDA5NzQtZmIyZmZhNTAtOTI0MjgzODk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:27.576772Z node 10 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-09-25T16:18:27.579014Z node 10 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-09-25T16:18:27.579071Z node 10 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-09-25T16:18:27.579140Z node 10 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:873: SelfId: [10:7554061958111577393:2319], Table: `Root/Test` ([72057594046644480:2:1]), SessionActorId: [10:7554061958111577071:2319]Got CONSTRAINT VIOLATION for table `Root/Test`. ShardID=72075186224037888, Sink=[10:7554061958111577393:2319].{
: Error: Conflict with existing key., code: 2012 } 2025-09-25T16:18:27.579312Z node 10 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3599: SelfId: [10:7554061958111577386:2319], SessionActorId: [10:7554061958111577071:2319], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[10:7554061958111577071:2319]. 2025-09-25T16:18:27.579552Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2082: SessionId: ydb://session/3?node_id=10&id=M2M3MDBkYTYtNGFiMDA5NzQtZmIyZmZhNTAtOTI0MjgzODk=, ActorId: [10:7554061958111577071:2319], ActorState: ExecuteState, TraceId: 01k60tpfkcdc2v9q04n3d3kvbs, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [10:7554061958111577387:2319] from: [10:7554061958111577386:2319] 2025-09-25T16:18:27.579580Z node 10 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1012: ActorId: [10:7554061958111577387:2319] TxId: 281474976715662. Ctx: { TraceId: 01k60tpfkcdc2v9q04n3d3kvbs, Database: , SessionId: ydb://session/3?node_id=10&id=M2M3MDBkYTYtNGFiMDA5NzQtZmIyZmZhNTAtOTI0MjgzODk=, PoolId: default, DatabaseId: /Root}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-09-25T16:18:27.579641Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=10&id=M2M3MDBkYTYtNGFiMDA5NzQtZmIyZmZhNTAtOTI0MjgzODk=, ActorId: [10:7554061958111577071:2319], ActorState: ExecuteState, TraceId: 01k60tpfkcdc2v9q04n3d3kvbs, Create QueryResponse for error on request, msg: >> test.py::test[blocks-div_uint64_opt2--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-09-25T16:18:22.680976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061936719753473:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:22.680995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:22.705661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419a/r3tmp/tmpbEQTTg/pdisk_1.dat 2025-09-25T16:18:22.838380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.002243Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:23.003021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.003048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26788, node 1 2025-09-25T16:18:23.004614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.136288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.143615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.143630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.143632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.143695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:23.317092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.329342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:23.352980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-09-25T16:18:24.179961Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061945563662829:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.179985Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:24.195850Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419a/r3tmp/tmpW0XNw1/pdisk_1.dat 2025-09-25T16:18:24.232757Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7190, node 4 2025-09-25T16:18:24.263088Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.263100Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.263102Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.263141Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12135 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.282112Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.282148Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.285255Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.296769Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:24.316096Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-09-25T16:18:24.484142Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:25.282161Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061947502503731:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.282188Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.313374Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419a/r3tmp/tmpO6tGOk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26641, node 7 2025-09-25T16:18:25.379428Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:25.383474Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.383503Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.385538Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.385557Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.385559Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.385600Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.385604Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24693 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:25.421480Z node 7 :FLAT_TX_SCHEMESHARD WAR ... 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:26.488338Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061953790792641:2154];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:26.488373Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:26.573742Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419a/r3tmp/tmpun7483/pdisk_1.dat 2025-09-25T16:18:26.596199Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.596223Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.598523Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18432, node 10 TClient is connected to server localhost:26111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:26.673423Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:26.673440Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.673444Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.673447Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.673484Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:26.680249Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:26.702598Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:26.816249Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:27.443286Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061954392673707:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.443310Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419a/r3tmp/tmpkLDxN7/pdisk_1.dat 2025-09-25T16:18:27.446843Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:27.464603Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18979, node 13 2025-09-25T16:18:27.494627Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.494636Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.494637Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.494676Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:27.503251Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14776 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:27.514727Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:27.545616Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.545651Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.547169Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.837850Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061954392674694:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.837867Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061954392674705:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.837878Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.837992Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061954392674708:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.838016Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:27.838817Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:27.843307Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061954392674709:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:27.903637Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061954392674781:2662] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:27.904061Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=13&id=N2E5MWFhNWYtNzgyNTQ0ZTktMjBhNzA3MzktYTljNzQyOTc=, ActorId: [13:7554061954392674691:2317], ActorState: ExecuteState, TraceId: 01k60tpfvx3817r4drnd8n3snq, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-09-25T16:18:27.905471Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=13&id=N2E5MWFhNWYtNzgyNTQ0ZTktMjBhNzA3MzktYTljNzQyOTc=, ActorId: [13:7554061954392674691:2317], ActorState: ExecuteState, TraceId: 01k60tpfy1cr3bftxeqv8fcytq, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-09-25T16:18:27.906263Z node 13 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=13&id=N2E5MWFhNWYtNzgyNTQ0ZTktMjBhNzA3MzktYTljNzQyOTc=, ActorId: [13:7554061954392674691:2317], ActorState: ExecuteState, TraceId: 01k60tpfy23tqjhwfaybmrtrn0, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> TNodeBrokerTest::NodesMigrationReuseRemovedID >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] Test command err: 2025-09-25T16:18:20.898508Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061927014375325:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:20.898533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b2/r3tmp/tmpW94hj9/pdisk_1.dat 2025-09-25T16:18:20.980173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:20.998991Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:21.001990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.002020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62766, node 1 2025-09-25T16:18:21.011429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:21.017513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.017527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.017530Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.017579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.052793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:21.119162Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:627: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:60026 Call 2025-09-25T16:18:21.129343Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:627: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:60026 2025-09-25T16:18:21.270056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:21.397203Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:627: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv4:127.0.0.1:46826 Call Call 2025-09-25T16:18:21.401906Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:663: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv4:127.0.0.1:46826 2025-09-25T16:18:21.403734Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:663: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:60030 2025-09-25T16:18:21.404206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:23.017099Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061937730739114:2186];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.017201Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b2/r3tmp/tmpXY9LFy/pdisk_1.dat 2025-09-25T16:18:23.050934Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.068205Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25082, node 4 2025-09-25T16:18:23.111689Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.111703Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.111705Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.111761Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:23.121198Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.121232Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.123464Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:23.129510Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:23.307753Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.176712Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061945027910576:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.176742Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b2/r3tmp/tmpa4e7vn/pdisk_1.dat 2025-09-25T16:18:24.207808Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:24.230738Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18990, node 7 2025-09-25T16:18:24.246437Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.246447Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.246449Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.246489Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.264855Z node 7 :FLAT_TX_SCHEMESHAR ... 
44480 2025-09-25T16:18:26.522158Z node 10 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037895 not found 2025-09-25T16:18:26.522907Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Dir/Table-6 2025-09-25T16:18:26.522954Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:26.528912Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817106578, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:26.529737Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976715686, done: 0, blocked: 1 2025-09-25T16:18:26.531105Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715686:0 2025-09-25T16:18:26.532609Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:493: TDropTable Propose, path: Root/Dir/Table-7, pathId: 0, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-09-25T16:18:26.532653Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715687:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:26.533219Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715687, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Dir/Table-7 2025-09-25T16:18:26.535508Z node 10 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037894 not found 2025-09-25T16:18:26.536164Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:26.543574Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817106592, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:26.544495Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976715687, done: 0, blocked: 1 2025-09-25T16:18:26.552482Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715687:0 2025-09-25T16:18:26.556008Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:493: TDropTable Propose, path: Root/Dir/Table-8, pathId: 0, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-09-25T16:18:26.556057Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715688:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:26.556813Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715688, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Dir/Table-8 2025-09-25T16:18:26.557464Z node 10 
:HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037892 not found 2025-09-25T16:18:26.558755Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:26.562310Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817106606, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:26.563392Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976715688, done: 0, blocked: 1 2025-09-25T16:18:26.564722Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715688:0 2025-09-25T16:18:26.565872Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:26.568944Z node 10 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037893 not found 2025-09-25T16:18:26.570552Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:27.269549Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061955419718001:2166];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.269605Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b2/r3tmp/tmpwaY7D3/pdisk_1.dat 2025-09-25T16:18:27.271786Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:27.290912Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4369, node 13 2025-09-25T16:18:27.304060Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.304072Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.304074Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.304124Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11317 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:27.323894Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:11317 2025-09-25T16:18:27.370748Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.370793Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.372197Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.567102Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:27.711623Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TClient is connected to server localhost:11317 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817107775 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-09-25T16:18:27.767892Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) TClient is connected to server localhost:11317 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817107775 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) >> THiveTest::TestSetDomain [GOOD] >> THiveTest::TestSetDomainAlready ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: 2025-09-25T16:18:20.249907Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061924540550798:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:20.249954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b5/r3tmp/tmpBi6D0W/pdisk_1.dat 2025-09-25T16:18:20.289242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:20.306199Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30572, node 1 2025-09-25T16:18:20.322869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:20.322883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:20.322886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:20.322929Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:20.351144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:20.351190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.352937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:20.364265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:27928 2025-09-25T16:18:20.441945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.442063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.442076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2025-09-25T16:18:20.442653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-09-25T16:18:20.444901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817100488, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:20.445466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715658:0 2025-09-25T16:18:20.445484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-09-25T16:18:20.445618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-09-25T16:18:20.446564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.446680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.446692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:20.447126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-09-25T16:18:20.569385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.953501Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7554061928406145211:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:20.953534Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:20.958851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:20.958886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.960186Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-09-25T16:18:20.960440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:20.961348Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/ydb_ut_tenant/.metadata/script_executions 2025-09-25T16:18:20.979183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage, ObjectStorage, PostgreSQL 2025-09-25T16:18:20.979206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:21.009921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-09-25T16:18:21.010233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:21.010245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:21.010479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_index.cpp:119: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { PartitionConfig { } } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-09-25T16:18:21.010520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 
2025-09-25T16:18:21.010528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-09-25T16:18:21.010655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:21.011594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-09-25T16:18:21.252127Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:21.399781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817101447, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:21.400419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715659:0 2025-09-25T16:18:21.400461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-09-25T16:18:21.400679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-09-25T16:18:21.417346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817101460, transactions count in st ... 
A_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.272239Z node 7 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-09-25T16:18:25.280561Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.282891Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/ydb_ut_tenant/.metadata/script_executions 2025-09-25T16:18:25.306197Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-09-25T16:18:25.306562Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:25.306571Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:25.309020Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-09-25T16:18:25.576521Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:25.635568Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817105682, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:25.636178Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710659:0 2025-09-25T16:18:25.636221Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-09-25T16:18:25.636603Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-09-25T16:18:25.673598Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817105710, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:25.678811Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710660:0 2025-09-25T16:18:25.704887Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-09-25T16:18:25.705061Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 
2025-09-25T16:18:26.397776Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061952676252624:2161];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:26.397819Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:26.402379Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b5/r3tmp/tmpup2D6H/pdisk_1.dat 2025-09-25T16:18:26.429203Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25232, node 10 2025-09-25T16:18:26.462966Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.462980Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.462983Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.463043Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.470414Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:26.498559Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.498588Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.500998Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:26.654755Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:26.833980Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:27.754243Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061956652285329:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.754404Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041b5/r3tmp/tmpPMJjOX/pdisk_1.dat 2025-09-25T16:18:27.769301Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:27.790249Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14256, node 13 2025-09-25T16:18:27.810201Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.810213Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.810216Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.810274Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:27.857431Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.857463Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.860645Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.870141Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:27.953651Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:28.209900Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2025-09-25T16:18:20.378859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:20.415682Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:20.418007Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:20.418086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:20.418116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005a08/r3tmp/tmpjtJ4eC/pdisk_1.dat 2025-09-25T16:18:20.491169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:20.491212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:20.502318Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:20.503118Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817099945394 != 1758817099945398 2025-09-25T16:18:20.537825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:20.583121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.586491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.586813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:20.587049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-09-25T16:18:20.587363Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-09-25T16:18:20.587373Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2405] Proxy marker# C1 2025-09-25T16:18:20.629471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:20.700793Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-09-25T16:18:20.700841Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-09-25T16:18:20.700923Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-09-25T16:18:20.701022Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 
72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-09-25T16:18:20.701031Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2405] Proxy 2025-09-25T16:18:20.701217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:20.701579Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-09-25T16:18:20.701604Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-09-25T16:18:20.701610Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-09-25T16:18:20.701615Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:6] persistent tx 1 acknowledged 2025-09-25T16:18:20.701663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:18:20.701677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-09-25T16:18:20.701895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-09-25T16:18:20.702504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:442: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:18:20.702800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:20.702813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:20.702950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-09-25T16:18:20.703557Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-09-25T16:18:20.705707Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-09-25T16:18:20.705740Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: 
HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-09-25T16:18:20.705798Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-09-25T16:18:20.705807Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-09-25T16:18:20.705816Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-09-25T16:18:20.705844Z node 1 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-09-25T16:18:20.705940Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-09-25T16:18:20.705968Z node 1 :HIVE DEBUG: hive_impl.cpp:1105: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-09-25T16:18:20.706113Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-09-25T16:18:20.706202Z node 1 :HIVE DEBUG: hive_impl.cpp:458: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2025-09-25T16:18:20.706222Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{90804130285856}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-09-25T16:18:20.706235Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{90804130285856}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-09-25T16:18:20.706260Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{90804130285856}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-09-25T16:18:20.706278Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2025-09-25T16:18:20.706285Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2025-09-25T16:18:20.706294Z node 1 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72057594037968897 ProcessBootQueue (1) 2025-09-25T16:18:20.706323Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-09-25T16:18:20.706329Z node 1 :HIVE DEBUG: hive_impl.cpp:247: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2025-09-25T16:18:20.706337Z node 1 :HIVE DEBUG: hive_impl.cpp:1251: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2025-09-25T16:18:20.706366Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 
2025-09-25T16:18:20.706387Z node 1 :HIVE DEBUG: hive_impl.cpp:327: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-09-25T16:18:20.706398Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-09-25T16:18:20.706423Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(DataShard.72075186224037888.Leader.1) to node 1 storage {Version# 1 TabletID# 72075186224037888 TabletType# DataShard Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.450000Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 21 ... ndle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-09-25T16:18:28.528407Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-09-25T16:18:28.528449Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:28.528465Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2025-09-25T16:18:28.528475Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:69: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-09-25T16:18:28.528496Z node 2 :HIVE DEBUG: hive_impl.cpp:922: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-09-25T16:18:28.528628Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-09-25T16:18:28.541098Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:18:28.551740Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-09-25T16:18:28.572291Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2025-09-25T16:18:28.572353Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 33500 2025-09-25T16:18:28.572376Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2025-09-25T16:18:28.572422Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:28.572436Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-09-25T16:18:28.572447Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-09-25T16:18:28.572488Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:28.572527Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp 
at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:28.572536Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:28.572539Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:28.572542Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2025-09-25T16:18:28.582900Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:28.582948Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:28.583299Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-09-25T16:18:28.583312Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 34000 Status# 16 SEND to# [2:397:2396] Proxy marker# C1 2025-09-25T16:18:28.644490Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2025-09-25T16:18:28.644531Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-09-25T16:18:28.644538Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-09-25T16:18:28.644619Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 34500 in 0.500000s at 34.450000s 2025-09-25T16:18:28.644746Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 34000, txid# 281474976715665 marker# C2 2025-09-25T16:18:28.644759Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 34000 Status# 17 SEND EvProposeTransactionStatus to# [2:397:2396] Proxy 2025-09-25T16:18:28.644971Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 34000 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 34000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-09-25T16:18:28.644981Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:28.645018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 34000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:28.645141Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:28.645150Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:28.645160Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [34000:281474976715665] in PlanQueue unit at 72075186224037889 2025-09-25T16:18:28.645217Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 34000:281474976715665 keys 
extracted: 0 2025-09-25T16:18:28.645251Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:18:28.645297Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:28.645319Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2025-09-25T16:18:28.645436Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:28.645861Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 34000} 2025-09-25T16:18:28.645877Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:28.646073Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:28.646098Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [34000 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:397:2396], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:28.646114Z node 2 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-09-25T16:18:28.646128Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:28.646148Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-09-25T16:18:28.646171Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:50] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-09-25T16:18:28.646178Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:50] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-09-25T16:18:28.646183Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:50] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-09-25T16:18:28.646188Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:50] persistent tx 281474976715665 acknowledged 2025-09-25T16:18:28.646372Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-09-25T16:18:28.647159Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-09-25T16:18:28.647177Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:18:28.647344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715665:0 2025-09-25T16:18:28.647375Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715665, 
publications: 1, subscribers: 1 2025-09-25T16:18:28.647554Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-09-25T16:18:28.647652Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:28.659334Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-09-25T16:18:28.659417Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-09-25T16:18:28.659922Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:18:28.660178Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-09-25T16:18:28.660295Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2025-09-25T16:18:28.660309Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-09-25T16:18:28.660335Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-09-25T16:18:28.660358Z node 2 :HIVE DEBUG: tablet_info.cpp:522: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-09-25T16:18:28.660380Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] Test command err: 2025-09-25T16:18:26.536053Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.539514Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.539594Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.539625Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.539674Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.543785Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:26.543843Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.543905Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.543954Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.544063Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.544131Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.544166Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.548860Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.548967Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549018Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549065Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549124Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549164Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549182Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549211Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549288Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549377Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549400Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549485Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549511Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549834Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549858Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549885Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549904Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549925Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549944Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.549981Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:26.549993Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550024Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550049Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550083Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550108Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550190Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.550250Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550496Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.550735Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.551845Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.551876Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.551917Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.551941Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.559873Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.560882Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.560961Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.561028Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.561476Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.561762Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.562075Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.562124Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.562261Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.562354Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.562464Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.563584Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.563899Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.586120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:26.586144Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:26.595551Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:26.596138Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:26.596227Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:26.596511Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:26.597376Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:26.597545Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:26.597619Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:26.597637Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:26.597643Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:26.597663Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:26.597705Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:26.597712Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:26.597718Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:26.597724Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:26.597744Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:26.597751Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:26.645228Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:26.645285Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:26.645299Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:26.645312Z ... 
d TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:27.846488Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:787:2323], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.846500Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:27.846504Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:27.846511Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:27.846574Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:789:2325], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.846593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-09-25T16:18:27.846599Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:27.846606Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:27.846671Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:791:2327], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.846685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:27.846690Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:27.846709Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:27.846768Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:793:2329], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.846802Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2025-09-25T16:18:27.846807Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:27.846813Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.8 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:27.846878Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:795:2331], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.846900Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:720:2270]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 2 } 2025-09-25T16:18:27.846906Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:27.846914Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 2, version: 8, server pipe id: [1:795:2331] 2025-09-25T16:18:27.846923Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v8 to [1:627:2218] 2025-09-25T16:18:27.846984Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:795:2331], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:27.846991Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 2, server pipe id: [1:795:2331] 2025-09-25T16:18:27.847026Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:797:2333], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847040Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 3 } 2025-09-25T16:18:27.847045Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:27.847051Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 3, version: 7, server pipe id: [1:797:2333] 2025-09-25T16:18:27.847056Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v8 to [1:627:2218] 2025-09-25T16:18:27.847112Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:797:2333], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:27.847117Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 3, server pipe id: [1:797:2333] 2025-09-25T16:18:27.847143Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:799:2335], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847161Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 4 } 2025-09-25T16:18:27.847166Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:27.847193Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 4, version: 6, server pipe id: [1:799:2335] 2025-09-25T16:18:27.847198Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v8 to [1:627:2218] 2025-09-25T16:18:27.847243Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:799:2335], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:27.847297Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 4, server pipe id: [1:799:2335] 2025-09-25T16:18:27.847327Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:801:2337], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-09-25T16:18:27.847345Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 5 } 2025-09-25T16:18:27.847350Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:27.847355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 5, version: 5, server pipe id: [1:801:2337] 2025-09-25T16:18:27.847361Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v8 to [1:627:2218] 2025-09-25T16:18:27.847416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:801:2337], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:27.847422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 5, server pipe id: [1:801:2337] 2025-09-25T16:18:27.847461Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:803:2339], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 6 } 2025-09-25T16:18:27.847485Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:27.847490Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 6, version: 4, server pipe id: [1:803:2339] 2025-09-25T16:18:27.847495Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v8 to [1:627:2218] 2025-09-25T16:18:27.847555Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:803:2339], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:27.847560Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 6, server pipe id: [1:803:2339] 2025-09-25T16:18:27.847602Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:805:2341], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847623Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:27.847628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:27.847667Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800026000 Name: "slot-0" } } 2025-09-25T16:18:27.847753Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:807:2343], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847769Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2218], 
Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:27.847774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:27.847793Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800026000 Name: "slot-1" } } 2025-09-25T16:18:27.847856Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:809:2345], Recipient [1:720:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.847873Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2218], Recipient [1:720:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-09-25T16:18:27.847877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:27.847886Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> THiveTest::TestBridgeCreateTablet [GOOD] >> THiveTest::TestBridgeDisconnect >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> THiveTest::TestSetDomainAlready [GOOD] >> THiveTest::TestSetDomainError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2025-09-25T16:18:26.082134Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.085016Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.085095Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.085125Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.085174Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.089029Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.089722Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.089790Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.089843Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.089956Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.090029Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.090063Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.093770Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.093875Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.093928Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.093979Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094038Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094077Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094097Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094124Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094214Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094307Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094333Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094438Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094463Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094797Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094822Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094850Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094871Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094904Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094923Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.094956Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.094980Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095013Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095040Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095075Z node 6 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095100Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095186Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.095242Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095453Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.095702Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.098447Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.098497Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.104063Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.104981Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.105018Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.105080Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.105888Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.106105Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.106169Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.106340Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.106427Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.107411Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.107590Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.107929Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.108093Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:26.126894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:26.126921Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:26.132378Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:26.132889Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:26.132980Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:26.133239Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:26.134208Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:26.134241Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:26.134307Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:26.134323Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:26.134329Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:26.134345Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:26.134364Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:26.134368Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:26.134372Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:26.134376Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:26.134388Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:26.134392Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:26.167615Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:26.167661Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-09-25T16:18:26.167673Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:26.167695Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:26.167867Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:585:2204], Rec ... 
DE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:944:2426], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179017Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.179022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179029Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179089Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:946:2428], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179107Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 13 } 2025-09-25T16:18:28.179111Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179118Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179178Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:948:2430], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179194Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.179200Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179207Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179269Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:950:2432], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179288Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 12 } 2025-09-25T16:18:28.179292Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179298Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179366Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:952:2434], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179384Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.179389Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event 
TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179395Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179460Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:954:2436], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179477Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 11 } 2025-09-25T16:18:28.179482Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.179488Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.14 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:28.179553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:956:2438], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179578Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 14 SeqNo: 6 } 2025-09-25T16:18:28.179584Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.179593Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:623:2216], seqNo: 6, version: 14, server pipe id: [1:956:2438] 2025-09-25T16:18:28.179601Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v14 -> v14 to [1:623:2216] 2025-09-25T16:18:28.179666Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:956:2438], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.179673Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:623:2216], seqNo: 6, server pipe id: [1:956:2438] 2025-09-25T16:18:28.179698Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:958:2440], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179718Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 13 SeqNo: 7 } 2025-09-25T16:18:28.179723Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.179728Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:623:2216], seqNo: 7, version: 13, server pipe id: [1:958:2440] 2025-09-25T16:18:28.179733Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v13 -> v14 to [1:623:2216] 2025-09-25T16:18:28.179792Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:958:2440], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.179797Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:623:2216], seqNo: 7, server pipe id: [1:958:2440] 2025-09-25T16:18:28.179823Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: 
StateWork, received event# 269877761, Sender [1:960:2442], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179842Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 12 SeqNo: 8 } 2025-09-25T16:18:28.179847Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.179852Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:623:2216], seqNo: 8, version: 12, server pipe id: [1:960:2442] 2025-09-25T16:18:28.179857Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v12 -> v14 to [1:623:2216] 2025-09-25T16:18:28.179913Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:960:2442], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.179918Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:623:2216], seqNo: 8, server pipe id: [1:960:2442] 2025-09-25T16:18:28.179942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:962:2444], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.179956Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 9 } 2025-09-25T16:18:28.179960Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.179964Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:623:2216], seqNo: 9, version: 11, server pipe id: [1:962:2444] 2025-09-25T16:18:28.179968Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v14 to [1:623:2216] 2025-09-25T16:18:28.180028Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:962:2444], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.180035Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:623:2216], seqNo: 9, server pipe id: [1:962:2444] 2025-09-25T16:18:28.180067Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:964:2446], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.180085Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:28.180091Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.180131Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } 2025-09-25T16:18:28.180205Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:966:2448], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-09-25T16:18:28.180232Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:28.180236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.180245Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-09-25T16:18:28.180302Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:968:2450], Recipient [1:887:2383]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.180321Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:623:2216], Recipient [1:887:2383]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-09-25T16:18:28.180325Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.180332Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] Test command err: 2025-09-25T16:18:28.654921Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:121:2155] Bootstrap 2025-09-25T16:18:28.655126Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:121:2155] Become StateWork (SchemeCache [1:128:2161]) 2025-09-25T16:18:28.660652Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:28.665124Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:28.665190Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-09-25T16:18:28.665582Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:28.665716Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:18:28.665790Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:28.665795Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:820} Handle TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:28.665842Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-09-25T16:18:28.669063Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-09-25T16:18:28.669111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-09-25T16:18:28.669138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-09-25T16:18:28.669154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:28.669169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
2025-09-25T16:18:28.669287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:28.693741Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:121:2155] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:28.694625Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-09-25T16:18:28.716000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:28.716058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:28.727075Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:28.727134Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:28.727153Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:28.727168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:28.727197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:28.727219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:28.727226Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:28.727234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:28.739905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:28.739959Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:28.750844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:28.750902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-09-25T16:18:28.751096Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-09-25T16:18:28.751104Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2214} LoadFinished 2025-09-25T16:18:28.752665Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 
2025-09-25T16:18:28.752680Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-09-25T16:18:28.753062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-09-25T16:18:28.753087Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-09-25T16:18:28.753398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:410} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/endf/0047b8/r3tmp/tmpHw52q6/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-09-25T16:18:28.753486Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/endf/0047b8/r3tmp/tmpHw52q6/pdisk_1.dat 2025-09-25T16:18:28.753898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-09-25T16:18:28.753931Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:28.753948Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-09-25T16:18:28.754000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-09-25T16:18:28.754123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-09-25T16:18:28.754518Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-09-25T16:18:28.754562Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:28.765708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-09-25T16:18:28.765912Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:28.765973Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:28.765981Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:28.768115Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-09-25T16:18:28.768210Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): 
Bootstrap 2025-09-25T16:18:28.768342Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:28.768355Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:28.768361Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:28.768379Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:342:2312] 2025-09-25T16:18:28.768759Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-09-25T16:18:28.768765Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:336:2308] 2025-09-25T16:18:28.768990Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:342:2312]} 2025-09-25T16:18:28.769007Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:28.769016Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:28.769020Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:28.786127Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-09-25T16:18:28.797764Z node 1 :LOCAL DEBUG: local.cpp:1256: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-09-25T16:18:28.797792Z node 1 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-09-25T16:18:28.805256Z node 1 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-09-25T16:18:28.805318Z node 1 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 5 Network: 1) 2025-09-25T16:18:28.805484Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:28.805491Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:28.805511Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:399:2349] 2025-09-25T16:18:28.805939Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72 ... 
roller::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:29.134449Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:29.146844Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:122:2155] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:29.147828Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-09-25T16:18:29.171858Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:29.171933Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:29.189267Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:29.189337Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:29.189358Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:29.189374Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:29.189421Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:29.189432Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:29.189441Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:29.189451Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:29.201131Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:29.201187Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:29.212080Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:29.212135Z node 2 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-09-25T16:18:29.212362Z node 2 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-09-25T16:18:29.212372Z node 2 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2214} LoadFinished 2025-09-25T16:18:29.212460Z node 2 :BS_CONTROLLER DEBUG: 
{BSC18@console_interaction.cpp:31} Console connection service started 2025-09-25T16:18:29.212470Z node 2 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-09-25T16:18:29.212809Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-09-25T16:18:29.212849Z node 2 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-09-25T16:18:29.212937Z node 2 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:410} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/endf/0047b8/r3tmp/tmpiRanyn/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-09-25T16:18:29.212994Z node 2 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1 Path# /home/runner/.ya/build/build_root/endf/0047b8/r3tmp/tmpiRanyn/pdisk_1.dat 2025-09-25T16:18:29.213150Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-09-25T16:18:29.213175Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:29.213191Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } 2025-09-25T16:18:29.213232Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-09-25T16:18:29.213251Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-09-25T16:18:29.213595Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } Success: true } 2025-09-25T16:18:29.213650Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:29.224797Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-09-25T16:18:29.224944Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:29.225021Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:29.225031Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:29.225062Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 
2025-09-25T16:18:29.225111Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:29.225210Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:29.225220Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:29.225225Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:29.225241Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:342:2312] 2025-09-25T16:18:29.225745Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-09-25T16:18:29.225754Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:336:2308] 2025-09-25T16:18:29.225924Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:342:2312]} 2025-09-25T16:18:29.225934Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:29.225943Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:29.225947Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:29.240603Z node 2 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-09-25T16:18:29.254900Z node 2 :LOCAL DEBUG: local.cpp:1256: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-09-25T16:18:29.254924Z node 2 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-09-25T16:18:29.278904Z node 2 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-09-25T16:18:29.279111Z node 2 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-09-25T16:18:29.279128Z node 2 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:29.279219Z node 2 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-09-25T16:18:29.279225Z node 2 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:29.279254Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:29.279257Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:29.279271Z 
node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:428:2370] 2025-09-25T16:18:29.279287Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:29.279291Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:29.279296Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:429:2372] 2025-09-25T16:18:29.279464Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:428:2370]} 2025-09-25T16:18:29.279478Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:429:2372]} 2025-09-25T16:18:29.279490Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:29.279497Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:29.279499Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:29.279524Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:29.279527Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:29.279529Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> TNodeBrokerTest::FixedNodeId [GOOD] >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::NodesSubscriberDisconnect >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] >> THiveTest::TestSetDomainError [GOOD] >> THiveTest::TestTabletsStartingCounter >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-09-25T16:18:24.387627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:24.455542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:24.458571Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:24.458763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:24.458793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0036b7/r3tmp/tmpT6XSRx/pdisk_1.dat 2025-09-25T16:18:24.565552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.565593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.588421Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:24.589846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817103739109 != 1758817103739113 2025-09-25T16:18:24.625498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.681827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:24.715041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.799021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.819975Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2572] 2025-09-25T16:18:24.820065Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:24.829660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:24.829722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:24.829921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:18:24.829932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:18:24.829940Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:18:24.829999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:24.830085Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2575] 2025-09-25T16:18:24.830118Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:24.831503Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:24.831527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:711:2572] in generation 1 2025-09-25T16:18:24.831715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:24.831735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:24.831870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-09-25T16:18:24.831879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037889 2025-09-25T16:18:24.831885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037889 2025-09-25T16:18:24.831935Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:24.831954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:24.831963Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:716:2575] in generation 1 2025-09-25T16:18:24.842278Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:24.848631Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:18:24.848743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:24.848814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:719:2593] 2025-09-25T16:18:24.848860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:24.848868Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:18:24.848874Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:24.849039Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:24.849053Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037889 2025-09-25T16:18:24.849070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:24.849086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037889, actorId: [1:720:2594] 2025-09-25T16:18:24.849094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:18:24.849099Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-09-25T16:18:24.849103Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-09-25T16:18:24.849242Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:18:24.849277Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:18:24.849337Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:24.849347Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:24.849357Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:18:24.849364Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:24.849372Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-09-25T16:18:24.849385Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-09-25T16:18:24.849641Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:676:2568], serverId# [1:685:2573], sessionId# [0:0:0] 2025-09-25T16:18:24.849660Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-09-25T16:18:24.849669Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:24.849675Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-09-25T16:18:24.849683Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:24.849723Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:18:24.849800Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:18:24.849825Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:18:24.849939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [1:677:2569], serverId# [1:689:2576], sessionId# [0:0:0] 2025-09-25T16:18:24.850010Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-09-25T16:18:24.850048Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-09-25T16:18:24.850062Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-09-25T16:18:24.850579Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:24.850603Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:24.860991Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:18:24.861038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:18:24.861333Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-09-25T16:18:24.861345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-09-25T16:18:24.998427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, cl ... nStepAccepted TabletId# 72075186224037889 step# 1000} 2025-09-25T16:18:29.432683Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:29.433169Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:18:29.433182Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:29.433195Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-09-25T16:18:29.433206Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037889 2025-09-25T16:18:29.433212Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037889 2025-09-25T16:18:29.433231Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:29.433243Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:18:29.433255Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:29.433316Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:18:29.433325Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:29.434115Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:29.434126Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:29.434132Z node 4 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:18:29.434145Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2397], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:29.434153Z node 4 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:18:29.434163Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-09-25T16:18:29.434370Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:29.434386Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:29.434907Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-09-25T16:18:29.434922Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-09-25T16:18:29.435125Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:29.435184Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:18:29.435190Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:18:29.435217Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:29.437234Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:786:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.437261Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:796:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.437273Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.437463Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:801:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.437489Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.438453Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:29.439694Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:29.439718Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:29.481857Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:29.577798Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:29.577841Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-09-25T16:18:29.578431Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:800:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:29.610351Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:873:2696] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:29.623486Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tphdwdgsh174wyz5kg4pm, Database: , SessionId: ydb://session/3?node_id=4&id=YzQ5NzRkYzYtYzM2MTJhNWItNjllMjRhMzQtNzA3MTI0Mzc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:29.624122Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:942:2727], serverId# [4:943:2728], sessionId# [0:0:0] 2025-09-25T16:18:29.624243Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-09-25T16:18:29.624315Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 1 Group: 1758817109624286 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:18:29.624342Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-09-25T16:18:29.634826Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:18:29.634861Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:29.646099Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tphm4b9atpwfy9kffck3d, Database: , SessionId: ydb://session/3?node_id=4&id=MzVjNTlhZjAtOTY3OGNjMzQtNzkzOTdmNGUtZDEzZjA2YTE=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:29.646828Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-09-25T16:18:29.646934Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 2 Group: 1758817109646897 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:18:29.646968Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:884: PersistChangeRecord: record: { Order: 3 Group: 1758817109646897 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-09-25T16:18:29.646988Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-09-25T16:18:29.657470Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1180: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-09-25T16:18:29.657518Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-09-25T16:18:29.658771Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:983:2759], serverId# [4:984:2760], sessionId# [0:0:0] 2025-09-25T16:18:29.659920Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037889, clientId# [4:985:2761], serverId# [4:986:2762], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-09-25T16:18:28.101362Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.104908Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.104996Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.105033Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.105083Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.109064Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.109133Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.109201Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.109258Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.109382Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.109462Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.109498Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.113243Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113358Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113419Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113474Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113561Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113604Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113625Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113659Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113757Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113860Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113891Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.113987Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114011Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114320Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114343Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114364Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114377Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114391Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114406Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114428Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114436Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114460Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-09-25T16:18:28.114485Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114515Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114534Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114596Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.114638Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.114859Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.115088Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.116234Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.116291Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.116320Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.127156Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.127294Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.128241Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.128312Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129365Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129482Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129591Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129637Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129683Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.129835Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.131534Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.131565Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.131883Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.132085Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.132099Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.132518Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.158689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:28.158715Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:28.164294Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:28.164816Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:28.164925Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:28.165229Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:28.166049Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:28.166219Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:28.166287Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:28.166306Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:28.166314Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:28.166333Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:28.166375Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:28.166382Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:28.166387Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:28.166394Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:28.166413Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:28.166419Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:28.210642Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:28.210694Z node 1 :NODE_BROKER TRACE: node_brok ... s: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:28.465229Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:28.465243Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:663:2188], Recipient [1:560:2188]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:28.465247Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:28.465261Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:28.465266Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (fixed) tenant: dc-1 2025-09-25T16:18:28.465278Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:324: [Dirty] Fix ID for node #1025.v4 host2:1001 2025-09-25T16:18:28.465290Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1025.v4 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=2 expire=NEVER servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:28.465349Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-09-25T16:18:28.465355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=4 2025-09-25T16:18:28.476327Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:28.476371Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:324: [Committed] Fix ID for node #1025.v4 host2:1001 
2025-09-25T16:18:28.476378Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-09-25T16:18:28.476384Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v4 host2:1001 to epoch cache 2025-09-25T16:18:28.476413Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v4 to update nodes log 2025-09-25T16:18:28.476461Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2025-09-25T16:18:28.476610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:668:2247], Recipient [1:560:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.476632Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:629:2218], Recipient [1:560:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:28.476639Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.476658Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2025-09-25T16:18:28.476731Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:670:2249], Recipient [1:560:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.476748Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039939, Sender [1:629:2218], Recipient [1:560:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2025-09-25T16:18:28.476754Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-09-25T16:18:28.476768Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1025 2025-09-25T16:18:28.476783Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:28.476799Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 4 Start: 26000 End: 3600026000 NextEnd: 7200026000 } } 2025-09-25T16:18:28.476890Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:672:2251], Recipient [1:560:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.476911Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:629:2218], Recipient [1:560:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:28.476915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.476931Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { 
NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2025-09-25T16:18:28.476991Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:674:2253], Recipient [1:560:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.477016Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:629:2218], Recipient [1:560:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:28.477022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:28.477033Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:28.477091Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:28.477118Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:635:2223] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:28.477193Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:676:2254], recipient# [1:675:2188], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:28.477210Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:28.477224Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:28.477241Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:675:2188], Recipient [1:560:2188]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:28.477246Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:28.477254Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:28.477258Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:28.477272Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:28.477287Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2025-09-25T16:18:28.477360Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:678:2256], Recipient [1:560:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.477379Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039939, Sender [1:629:2218], Recipient [1:560:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2025-09-25T16:18:28.477384Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-09-25T16:18:28.477405Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1025 2025-09-25T16:18:28.477411Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:28.477423Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 4 Start: 26000 End: 3600026000 NextEnd: 7200026000 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] Test command err: 2025-09-25T16:18:28.135992Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.139514Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.139585Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.139621Z node 7 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.139668Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.143579Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.143628Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.143688Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.143735Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.143843Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.143911Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.143945Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.149270Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149364Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149401Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149447Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149518Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149534Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149547Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149558Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149580Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149647Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149733Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149843Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.149880Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150112Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150161Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150185Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150220Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:28.150254Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150278Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150308Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150512Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:28.150575Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150605Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150666Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150695Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150727Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.150765Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.151232Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.151337Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.151749Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.151782Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.154234Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.154273Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.160690Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.160778Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.160852Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.161912Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.161962Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.161982Z node 3 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.162699Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.162861Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.163067Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.163186Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.163287Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.163699Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.164162Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.164550Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.165027Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:28.206406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:28.206433Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:28.223517Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:28.229380Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:28.229510Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:28.229815Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:28.234649Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:28.236715Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:28.237411Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:28.237432Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:28.237438Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:28.237470Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:28.237566Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:28.237573Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:28.237578Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:28.237584Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:28.237606Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:28.237614Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:28.283313Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:28.283354Z node 1 :NODE_BROKER TRACE: node_brok ... 8:28.707809Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:726:2274], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.707829Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:726:2274] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707840Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:731:2279] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707851Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:730:2278] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707863Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:727:2275] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707874Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:725:2273] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707884Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:727:2275], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.707915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:728:2276], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.707922Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:728:2276] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 
2025-09-25T16:18:28.707948Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:729:2277], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.707959Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:729:2277] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.707984Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:730:2278], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708030Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:731:2279], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708091Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:28.708097Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708108Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708143Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:725:2273] 2025-09-25T16:18:28.708147Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708155Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708171Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:727:2275] 2025-09-25T16:18:28.708177Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708183Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:730:2278] 2025-09-25T16:18:28.708218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708224Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708234Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:731:2279] 2025-09-25T16:18:28.708238Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708244Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708255Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:726:2274] 
2025-09-25T16:18:28.708259Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708265Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708279Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:728:2276] 2025-09-25T16:18:28.708283Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708289Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708320Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:729:2277] 2025-09-25T16:18:28.708324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708330Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:736:2284], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708433Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.708436Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708440Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708491Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:738:2286], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.708509Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708515Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708561Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:740:2288], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.708575Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708578Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 
1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708615Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:742:2290], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708627Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:28.708630Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.708633Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z 2025-09-25T16:18:28.708673Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:744:2292], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708688Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-09-25T16:18:28.708692Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.708698Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:629:2218], seqNo: 2, version: 6, server pipe id: [1:744:2292] 2025-09-25T16:18:28.708704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v6 to [1:629:2218] 2025-09-25T16:18:28.708743Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:744:2292], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.708748Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:629:2218], seqNo: 2, server pipe id: [1:744:2292] 2025-09-25T16:18:28.708769Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:746:2294], Recipient [1:685:2247]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.708790Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:629:2218], Recipient [1:685:2247]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:28.708794Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.708814Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] |81.4%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::ShiftIdRangeRemoveActive >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-09-25T16:18:30.009454Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.014985Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.015213Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.015358Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.034608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:30.034638Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:30.039810Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:30.040189Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:30.040274Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:30.040454Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:30.040967Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:30.040995Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:30.041059Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:30.041076Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.041082Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:30.041100Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:30.041114Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:30.041120Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:30.041125Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:30.041132Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.041150Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:30.041157Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:30.083617Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:30.083667Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2025-09-25T16:18:30.083680Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:30.083691Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:30.083809Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:195:2195], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:30.083838Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:195:2195] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... waiting for nameservers are connected (done) 2025-09-25T16:18:30.084457Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:18:2065], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-09-25T16:18:30.084482Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:30.084491Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:18:2065], seqNo: 0, version: 0, server pipe id: [1:195:2195] 2025-09-25T16:18:30.084502Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v1 to [1:18:2065] ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-09-25T16:18:30.084580Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:30.084612Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:202:2200], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:30.084619Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:30.084629Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:30.084699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:30.120965Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2380: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-09-25T16:18:30.127876Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 
MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-09-25T16:18:30.127965Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2522: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-09-25T16:18:30.128016Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:30.128103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:213:2204], recipient# [1:205:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 7205759404 ... 
ePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:30.325862Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [2:213:2204], recipient# [2:205:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:30.325882Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:30.325899Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:30.325916Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [2:205:2179], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:30.325921Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:30.325940Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:30.325945Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:30.325978Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:30.326031Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:30.326039Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-09-25T16:18:30.326043Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=2 2025-09-25T16:18:30.337374Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:30.337420Z node 2 :NODE_BROKER DEBUG: 
node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:30.337437Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-09-25T16:18:30.337448Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-09-25T16:18:30.337484Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-09-25T16:18:30.337538Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-09-25T16:18:30.337695Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [2:217:2208], Recipient [2:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:30.337737Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [2:202:2200], Recipient [2:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:30.337745Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:30.337759Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:30.337821Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:30.337851Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:30.337911Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [2:219:2209], recipient# [2:218:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { 
Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:30.337929Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:30.337947Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:30.337963Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [2:218:2179], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:30.337969Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:30.337986Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:30.337991Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:30.338020Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:30.338079Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:30.338087Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-09-25T16:18:30.338092Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=3 2025-09-25T16:18:30.349043Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:30.349073Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:30.349088Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-09-25T16:18:30.349094Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-09-25T16:18:30.349127Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-09-25T16:18:30.349192Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... 
waiting for cache miss 2025-09-25T16:18:30.349278Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:585: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.029024s } 2025-09-25T16:18:30.349300Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:686: New cache miss: nodeId# 1024, deadline# 1.029024s 2025-09-25T16:18:30.349305Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:690: Schedule wakeup for new earliest deadline 1.029024s 2025-09-25T16:18:30.349315Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:585: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.029024s } 2025-09-25T16:18:30.349321Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:686: New cache miss: nodeId# 1025, deadline# 1.029024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2025-09-25T16:18:30.422734Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:935: HandleWakeup at 1.030024s 2025-09-25T16:18:30.422769Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:137: Cache miss failed: nodeId=1024, error=Deadline exceeded 2025-09-25T16:18:30.422780Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:137: Cache miss failed: nodeId=1025, error=Deadline exceeded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2025-09-25T16:18:26.927176Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.929599Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.929672Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.929707Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.929757Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.933865Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.933955Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.934031Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.934101Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.934233Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.934323Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.934366Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.939348Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939497Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939632Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939697Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939765Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939824Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939847Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939884Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.939979Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940081Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940111Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940206Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940237Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940612Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940638Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940665Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940685Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940708Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940730Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:26.940767Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.940780Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.940841Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.940871Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.940907Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.940934Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.941025Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:26.941091Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.941242Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.941585Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.942104Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.945245Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.951226Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.952241Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.952279Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954139Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954258Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954661Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954717Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954804Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.954870Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.955717Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.956122Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:26.977138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:26.977169Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-09-25T16:18:26.982487Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:26.982981Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:26.983067Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:26.983289Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:26.984130Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:26.984167Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:26.984228Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:26.984245Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:26.984251Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:26.984269Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:26.984298Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:26.984305Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:26.984310Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:26.984316Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:26.984335Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:26.984342Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:27.017615Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:27.017682Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-09-25T16:18:27.017697Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:27.017710Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:27.017908Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:583:2204], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.018018Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:584:2205], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:27.018047Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: ... 
8:28.944795Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:804:2324], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.944879Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:808:2328] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944891Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:803:2323] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944900Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:804:2324] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944910Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:805:2325] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944920Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:806:2326] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:805:2325], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.944962Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:807:2327] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944973Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:809:2329] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:28.944983Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:806:2326], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945013Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:807:2327], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:808:2328], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945055Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:809:2329], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945105Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 5 } 2025-09-25T16:18:28.945112Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945123Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945150Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:804:2324] 2025-09-25T16:18:28.945153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945158Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945166Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:805:2325] 2025-09-25T16:18:28.945169Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945173Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945188Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:806:2326] 2025-09-25T16:18:28.945190Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945194Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945207Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:803:2323] 2025-09-25T16:18:28.945209Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945213Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945220Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:807:2327] 2025-09-25T16:18:28.945222Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945227Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945251Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:808:2328] 2025-09-25T16:18:28.945256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945262Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945281Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:809:2329] 2025-09-25T16:18:28.945286Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945397Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:814:2334], Recipient 
[1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945418Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.945423Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945429Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945484Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:816:2336], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945496Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.945498Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945501Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945554Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:818:2338], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:28.945576Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945582Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945634Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:820:2340], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945657Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-09-25T16:18:28.945662Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:28.945668Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-09-25T16:18:28.945731Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:822:2342], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945755Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 2 } 2025-09-25T16:18:28.945761Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:28.945768Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:621:2216], seqNo: 2, version: 9, server pipe id: [1:822:2342] 2025-09-25T16:18:28.945778Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v9 to [1:621:2216] 2025-09-25T16:18:28.945844Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:822:2342], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:28.945852Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:621:2216], seqNo: 2, server pipe id: [1:822:2342] 2025-09-25T16:18:28.945885Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:824:2344], Recipient [1:763:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:28.945908Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:621:2216], Recipient [1:763:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:28.945913Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:28.945932Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] Test command err: 2025-09-25T16:18:23.851029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061939448639099:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.851046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004195/r3tmp/tmpsxq8E7/pdisk_1.dat 2025-09-25T16:18:23.936651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.972955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.972986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.980435Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18152, node 1 2025-09-25T16:18:23.986060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.067095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.067110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.067112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.067169Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:24.088557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.091807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:24.466076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.533306Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-09-25T16:18:24.573252Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-09-25T16:18:25.452447Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061946534978340:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.452480Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.499212Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004195/r3tmp/tmpmSr4LV/pdisk_1.dat 2025-09-25T16:18:25.529994Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31094, node 4 2025-09-25T16:18:25.544568Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.544595Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.544597Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.545830Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:25.566650Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.566679Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.569419Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:25.602419Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:25.608035Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:25.731803Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:26.086022Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:26.851412Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:26.851441Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004195/r3tmp/tmpKIqdSk/pdisk_1.dat 2025-09-25T16:18:26.872583Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16239, node 7 2025-09-25T16:18:26.883847Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.883861Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.883864Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.883914Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-09-25T16:18:26.909456Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:26.950223Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.950262Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.951865Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.144523Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:27.298512Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:28.333826Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061960731017400:2257];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:28.333856Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004195/r3tmp/tmpYXJUTB/pdisk_1.dat 2025-09-25T16:18:28.350144Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:28.370930Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20943, node 10 2025-09-25T16:18:28.384806Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:28.384817Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:28.384819Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:28.384894Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4572 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:18:28.435736Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:28.435776Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:28.437457Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:28.447180Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:28.573345Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:28.789990Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:29.606002Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061964610396851:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:29.606036Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:29.634602Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004195/r3tmp/tmpKygOBG/pdisk_1.dat 2025-09-25T16:18:29.657991Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21430, node 13 2025-09-25T16:18:29.680028Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:29.680042Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:29.680044Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:29.680093Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:29.704039Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:29.707365Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:29.707396Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:29.711243Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:29.856632Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:30.039054Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) >> THiveTest::TestTabletsStartingCounter [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot >> TNodeBrokerTest::NodesMigrationExtendLease >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveExpired >> THiveTest::TestBridgeDisconnect [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2025-09-25T16:18:27.538442Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.541634Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.541709Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.541733Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.541766Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.545747Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.545806Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.545866Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.545916Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: 
Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.546021Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.546093Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.546130Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.550171Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550271Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550321Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550372Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550433Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550471Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550491Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550520Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550598Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550692Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550715Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550805Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.550834Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551230Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551280Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551313Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551334Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551358Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551381Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551420Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551436Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551472Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551499Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551532Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551556Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551643Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:27.551705Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.551931Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.552219Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.552488Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.552518Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.555573Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.555604Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.565398Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.566215Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.566238Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.566293Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.566587Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567466Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567549Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567602Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567667Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567723Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.567827Z node 1 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.568986Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.569146Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.569446Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.569564Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.569783Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.570053Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.570199Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.570460Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.571548Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.572173Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:27.592685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:27.592709Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:27.599599Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:27.600023Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:27.600101Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:27.600335Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:27.601231Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:27.601468Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:27.601552Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:27.601569Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:27.601574Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:27.601592Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Comp ... 
8:29.533796Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:816:2324], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.533845Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:815:2323] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533890Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:817:2325], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.533903Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:817:2325] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533915Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:821:2329] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533925Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:818:2326], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.533946Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:816:2324] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533957Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:818:2326] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533967Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:819:2327] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.533977Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:820:2328] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:29.534009Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:819:2327], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534031Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:820:2328], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534061Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:821:2329], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534125Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 5 } 2025-09-25T16:18:29.534141Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534153Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534211Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:815:2323] 2025-09-25T16:18:29.534216Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534225Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534241Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:817:2325] 2025-09-25T16:18:29.534245Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534250Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534262Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:818:2326] 2025-09-25T16:18:29.534268Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534276Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534316Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:819:2327] 2025-09-25T16:18:29.534320Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534327Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534350Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:820:2328] 2025-09-25T16:18:29.534354Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534360Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534383Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:821:2329] 2025-09-25T16:18:29.534387Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534394Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534417Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:816:2324] 2025-09-25T16:18:29.534422Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534429Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534549Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:826:2334], Recipient 
[1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534578Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:29.534582Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534589Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534658Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:828:2336], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534676Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:29.534680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534687Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534750Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:830:2338], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534769Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:29.534774Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534780Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534853Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:832:2340], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534876Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-09-25T16:18:29.534880Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:29.534887Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #6.8 1970-01-01T05:00:00.026000Z - 1970-01-01T06:00:00.026000Z - 1970-01-01T07:00:00.026000Z 2025-09-25T16:18:29.534943Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:834:2342], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.534965Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 2 } 2025-09-25T16:18:29.534971Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:29.534978Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:635:2218], seqNo: 2, version: 8, server pipe id: [1:834:2342] 2025-09-25T16:18:29.534987Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v8 to [1:635:2218] 2025-09-25T16:18:29.535049Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:834:2342], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:29.535058Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:635:2218], seqNo: 2, server pipe id: [1:834:2342] 2025-09-25T16:18:29.535089Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:836:2344], Recipient [1:775:2297]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:29.535108Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:635:2218], Recipient [1:775:2297]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:29.535112Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:29.535129Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> TSlotIndexesPoolTest::Init [GOOD] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-09-25T16:18:23.059326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061940099211082:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.059349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004197/r3tmp/tmpz89Ccw/pdisk_1.dat 2025-09-25T16:18:23.145059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.164772Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3587, node 1 2025-09-25T16:18:23.175820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.175854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.179654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.259999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.260010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.260012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.260061Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:23.290086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.375023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.608854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061940099212027:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.608876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.608926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061940099212053:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.608935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.695232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:23.718410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061940099212209:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.718440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.718464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061940099212215:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.718469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.718481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061940099212214:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.719216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:23.723032Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061940099212218:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:23.819539Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061940099212293:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:23.829601Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpbe90x9qrzkpe44zbg7d, Database: , SessionId: ydb://session/3?node_id=1&id=NGQ3MTEyMzEtZThiNjRlMWYtMjA3YzI0MjctYjkyZjYzYzY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:23.848415Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpbe90x9qrzkpe44zbg7d, Database: , SessionId: ydb://session/3?node_id=1&id=NTU1ZmYzY2QtZjJlMWJmMmEtMWYwYjg2MGUtNTg4NDFlMzM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:23.858486Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k60tpbe90x9qrzkpe44zbg7d, Database: , SessionId: ydb://session/3?node_id=1&id=MzBkNzQ1NzItZjQ1NzgyY2ItMjk1YmEyNTUtNGM2NjhjMDQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:23.881197Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k60tpbe90x9qrzkpe44zbg7d, Database: , SessionId: ydb://session/3?node_id=1&id=Njg1ZTcyNTQtNTc5MDgzYmEtOWFlYjc0MzktZTJiOTgxOGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.062330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:24.878576Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:24.878610Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004197/r3tmp/tmpZSVM28/pdisk_1.dat 2025-09-25T16:18:24.902197Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25718, node 4 2025-09-25T16:18:24.932783Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.932796Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.932798Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.932875Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { ... SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.706586Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:26.738874Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:26.992602Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:12739 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. This is a fatal and unrecoverable error. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004197/r3tmp/tmpUQRr5j/pdisk_1.dat 2025-09-25T16:18:27.790345Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:27.790410Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:27.809724Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12056, node 10 2025-09-25T16:18:27.832331Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.832349Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.832351Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.832416Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:27.862907Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:27.891050Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.891086Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.892208Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.988738Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:28.254194Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' Writing to index implementation tables is not allowed. 2025-09-25T16:18:28.976007Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061961192156308:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:28.976036Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:28.993602Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004197/r3tmp/tmp9AW8c4/pdisk_1.dat 2025-09-25T16:18:29.012431Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9868, node 13 2025-09-25T16:18:29.046945Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:29.046961Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:29.046963Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:29.047017Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:29.072591Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:29.077547Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:29.077577Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:29.080855Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:29.241058Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:29.390429Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:29.977953Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup;
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' Writing to index implementation tables is not allowed. >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> TNodeBrokerTest::SyncNodes >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-09-25T16:18:30.738637Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.744569Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.744725Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.744847Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:30.759881Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:30.765211Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:30.765958Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:30.766051Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:30.766291Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:30.766840Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:30.766863Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:30.766933Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:30.766948Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.766953Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:30.766971Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:30.766984Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:30.766990Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:30.766995Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:30.767000Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.767019Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:30.767025Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:30.809077Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:30.809130Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2025-09-25T16:18:30.809143Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:30.809155Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:30.809250Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:195:2195], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:30.809277Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:195:2195] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-09-25T16:18:30.809932Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-09-25T16:18:30.809952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:30.809964Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.810022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:30.810054Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:202:2200], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:30.810060Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:30.810071Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:30.810135Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:30.843173Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2380: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-09-25T16:18:30.850916Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-09-25T16:18:30.851042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2522: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-09-25T16:18:30.851105Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:30.851208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: 
Send result: self# [1:213:2204], recipient# [1:205:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:30.851230 ... er::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-09-25T16:18:31.067044Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [2:204:2202], Recipient [2:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.067075Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [2:202:2200], Recipient [2:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:31.067080Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:31.067090Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:31.067137Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:31.067150Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2380: Create subscriber: self# [2:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-09-25T16:18:31.067467Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [2:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-09-25T16:18:31.067521Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2522: ResolveCacheItem: self# [2:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [2:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-09-25T16:18:31.067556Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { 
Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:31.067601Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [2:213:2204], recipient# [2:205:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:31.067616Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:31.067631Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:31.067648Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [2:205:2179], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:31.067653Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:31.067669Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:31.067673Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:31.067701Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:31.067751Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:31.067758Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-09-25T16:18:31.067762Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=2 2025-09-25T16:18:31.078529Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:31.078552Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:31.078564Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-09-25T16:18:31.078573Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: 
Add node #1024.v2 host1:1001 to epoch cache 2025-09-25T16:18:31.078597Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-09-25T16:18:31.078657Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } ... waiting for cache miss 2025-09-25T16:18:31.078726Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:585: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 1.027512s } 2025-09-25T16:18:31.078737Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:686: New cache miss: nodeId# 1024, deadline# 1.027512s 2025-09-25T16:18:31.078742Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:690: Schedule wakeup for new earliest deadline 1.027512s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) 2025-09-25T16:18:31.089918Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:31.089941Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:31.089954Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v2 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-09-25T16:18:31.166482Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:935: HandleWakeup at 1.028512s 2025-09-25T16:18:31.166507Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:137: Cache miss failed: nodeId=1024, error=Deadline exceeded |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired >> TNodeBrokerTest::NodesMigration1001Nodes >> TNodeBrokerTest::LoadStateMoveEpoch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004180/r3tmp/tmpqqf4Gl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23275, node 1 TClient is connected to server localhost:19929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-09-25T16:18:25.469013Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061947082282920:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.469060Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004180/r3tmp/tmp7lJ0q3/pdisk_1.dat 2025-09-25T16:18:25.480859Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:25.500567Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9389, node 4 2025-09-25T16:18:25.521525Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.521536Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.521538Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.521585Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:25.568164Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.568191Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.572362Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.581579Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:25.666836Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:25.930559Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061947082283804:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.930593Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.930726Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061947082283815:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.930742Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.974683Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:26.004395Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061951377251264:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.004418Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061951377251269:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.004421Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.004462Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061951377251272:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.004468Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.005149Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:26.012116Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7554061951377251271:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:26.085401Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061951377251344:2785] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:26.116038Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpe2m4thc1ny7neysx89a, Database: , SessionId: ydb://session/3?node_id=4&id=Nzk0NGVjNzgtNDYyYTk3MWItNGYyMmMzMTItMTg0MDc3MWM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:26.140888Z node 4 :TX_PROXY WARN: rpc_alter_table.cpp:331: [AlterTableAddIndex [4:7554061951377251392:2356] TxId# 281474976715663] Access check failed 2025-09-25T16:18:26.149611Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:26.168383Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:18:26.185025Z node 4 :TX_PROXY ERROR: rpc_alter_table.cpp:276: [AlterTableAddIndex [4:7554061951377251759:2368] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-09-25T16:18:26.206109Z node 4 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-09-25T16:18:27.178901Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554061957368420163:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.178949Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004180/r3tmp/tmpBXt33r/pdisk_1.dat 2025-09-25T16:18:27.197515Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables e ... 
olatileState: Disconnected -> Connecting 2025-09-25T16:18:28.866502Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:28.868617Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:28.972732Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:29.161974Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061963375711321:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.162814Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.163662Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:29.164100Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061963375711341:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.164120Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.199799Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061963375711484:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.199841Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.199890Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061963375711489:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.199898Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554061963375711490:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.199969Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:29.200804Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:29.205064Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554061963375711493:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:29.279938Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554061963375711568:2788] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:29.293215Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tph6f296dps3s7dprn4sm, Database: , SessionId: ydb://session/3?node_id=10&id=YmFhZjAwZjgtNjA4ZDYzZjMtZGRiNzRlODctOGY1NGE2MDc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:29.302450Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:29.323963Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:18:30.316791Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061969156198724:2081];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:30.317007Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004180/r3tmp/tmpcuUu5m/pdisk_1.dat 2025-09-25T16:18:30.415296Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:30.437010Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:30.437041Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:30.439614Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29062, node 13 TClient is connected to server localhost:24919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:30.482030Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:30.493145Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:30.501243Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:30.501258Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:30.501261Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:30.501325Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:30.502271Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:30.702188Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:30.859355Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) >> TYqlDateTimeTests::SimpleOperations [GOOD] >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TEnumerationTest::TestPublish [GOOD] >> TLocalTests::TestAddTenant >> 
TNodeBrokerTest::ExtendLeaseRestartRace >> TNodeBrokerTest::ExtendLeasePipelining >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TLocalTests::TestAddTenant [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TNodeBrokerTest::NodesMigrationExpiredChanged >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] >> TNodeBrokerTest::SyncNodes [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] >> TStorageBalanceTest::TestScenario1 [GOOD] >> YdbTableBulkUpsert::Errors [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true >> THiveTest::TestDeleteOwnerTablets >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> TStorageBalanceTest::TestScenario2 >> YdbTableBulkUpsert::Limits >> THiveTest::TestDeleteOwnerTabletsMany |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> YdbOlapStore::LogNonExistingUserId [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] >> YdbTableBulkUpsert::Overload [GOOD] >> YdbTableBulkUpsert::DecimalPK >> YdbYqlClient::QueryLimits >> test.py::test[aggregate-group_by_expr_dict--Results] >> YdbOlapStore::LogPagingBefore >> THiveTest::TestHiveBalancerDifferentResources >> YdbTableBulkUpsert::RetryOperationSync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::SimpleOperations [GOOD] Test command err: 2025-09-25T16:18:23.661509Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061939988593800:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.661540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004199/r3tmp/tmpGKaw7e/pdisk_1.dat 2025-09-25T16:18:23.739281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.803794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.803820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.805604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.822417Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: 
Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65333, node 1 2025-09-25T16:18:23.916495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.916512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.916515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.916562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:24.004153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.008394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:24.203180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:24.229075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061944283562129:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.229103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061944283562120:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.229128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.229404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061944283562135:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.229417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.230011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:24.236078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061944283562134:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-09-25T16:18:24.306722Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061944283562205:2788] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:24.454921Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710661. Ctx: { TraceId: 01k60tpcb43tcc9hf19jqz595k, Database: , SessionId: ydb://session/3?node_id=1&id=ZjQ3ODBkMDMtOTE3ZTQ3M2EtN2U2Y2VmYzEtNDkwNzdmM2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.496393Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tpcjsd63z97wjqmtz63e7, Database: , SessionId: ydb://session/3?node_id=1&id=ZjQ3ODBkMDMtOTE3ZTQ3M2EtN2U2Y2VmYzEtNDkwNzdmM2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.525768Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710663. Ctx: { TraceId: 01k60tpckq16f7e4eh1cwsa19r, Database: , SessionId: ydb://session/3?node_id=1&id=ZjQ3ODBkMDMtOTE3ZTQ3M2EtN2U2Y2VmYzEtNDkwNzdmM2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.553394Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710664. Ctx: { TraceId: 01k60tpcmg69pshvrbxpacs0e0, Database: , SessionId: ydb://session/3?node_id=1&id=ZjQ3ODBkMDMtOTE3ZTQ3M2EtN2U2Y2VmYzEtNDkwNzdmM2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:24.576696Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710665. Ctx: { TraceId: 01k60tpcndacb5grdjyf3yk8ye, Database: , SessionId: ydb://session/3?node_id=1&id=ZjQ3ODBkMDMtOTE3ZTQ3M2EtN2U2Y2VmYzEtNDkwNzdmM2Y=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:24.665000Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:25.570149Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061947166436900:2157];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.570286Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:25.575361Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004199/r3tmp/tmpf3h5zx/pdisk_1.dat 2025-09-25T16:18:25.603592Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27468, node 4 2025-09-25T16:18:25.628092Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.628105Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.628108Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.628157Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:25.671595Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.671622Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.673283Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.674145Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain ... :123: TxId: 281474976715663. Ctx: { TraceId: 01k60tphdzf6wv5av2c0xj9x71, Database: , SessionId: ydb://session/3?node_id=10&id=ZGM2ODUyZGYtYzk0YjIxMjEtZjk2ZTNlMTYtODg3Y2Q5MjE=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:29.489714Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k60tphetejr1rv8q9djn8k6j, Database: , SessionId: ydb://session/3?node_id=10&id=ZGM2ODUyZGYtYzk0YjIxMjEtZjk2ZTNlMTYtODg3Y2Q5MjE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004199/r3tmp/tmpIX4Jje/pdisk_1.dat 2025-09-25T16:18:30.421384Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:30.421530Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:30.457115Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:30.463479Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:30.463513Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:30.465820Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28874, node 13 2025-09-25T16:18:30.490780Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:30.490812Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:30.490814Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:30.490876Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:30.521376Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:30.703265Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:30.855586Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:30.876096Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:30.891127Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061971376252133:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:30.891141Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061971376252122:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:30.891155Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:30.891235Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061971376252137:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:30.891247Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:30.892078Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:30.899557Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061971376252136:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:18:30.961506Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061971376252219:2880] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:30.990736Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k60tpjva7naz93d9g59ddb3p, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.014014Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710663. Ctx: { TraceId: 01k60tpjym5spwqxgm4trxbev0, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.079110Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710664. Ctx: { TraceId: 01k60tpjz9fnqf115hx20gnvqb, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.080836Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710665. Ctx: { TraceId: 01k60tpjz9fnqf115hx20gnvqb, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.145280Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710666. Ctx: { TraceId: 01k60tpk1cb3marj6nrswkz811, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.146983Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710667. Ctx: { TraceId: 01k60tpk1cb3marj6nrswkz811, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.172018Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710668. Ctx: { TraceId: 01k60tpk3jf68eqevprbmrbq5c, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.191676Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710669. Ctx: { TraceId: 01k60tpk45aabe0426k4jbpn4g, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.213622Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710670. Ctx: { TraceId: 01k60tpk4s4v27809350gbrv5k, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.236077Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710671. 
Ctx: { TraceId: 01k60tpk5g36z57agdeadnp1wd, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.259180Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710672. Ctx: { TraceId: 01k60tpk658c7xvembgfygjee4, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.304565Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710673. Ctx: { TraceId: 01k60tpk6wddgfa8v8xbzyx5av, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.307402Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710674. Ctx: { TraceId: 01k60tpk6wddgfa8v8xbzyx5av, Database: , SessionId: ydb://session/3?node_id=13&id=NjU0OTVlNi05MmI3Yjc2NC03MGVhYjM0ZC05OGM5MzUzOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:31.420183Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2025-09-25T16:18:30.396321Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.400055Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.400148Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.400187Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.400244Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.404514Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.404590Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.404676Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.404741Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.404892Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.404979Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.405022Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.412294Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412434Z node 3 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412506Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412573Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412646Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412692Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412716Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412754Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412875Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.412979Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.413006Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.413103Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.413132Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414197Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414250Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414289Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414320Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414358Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414386Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414434Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414452Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414496Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414527Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414567Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414595Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.414701Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.414770Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.415029Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.415451Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.418398Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.418476Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.426143Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.426271Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.426491Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.426738Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.427374Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.427449Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.427469Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.427554Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.427726Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.428995Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.429208Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.429552Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.429753Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.458170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:30.458198Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-09-25T16:18:30.463592Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:30.464088Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:30.464166Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:30.464390Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:30.465810Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:30.465848Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:30.465907Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:30.465924Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:30.465932Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:30.465949Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:30.465976Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:30.465982Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:30.465988Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:30.465995Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:30.466014Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:30.466021Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:30.509206Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:30.509272Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-09-25T16:18:30.509286Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:30.509300Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:30.509485Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:585:2204], Rec ... 
rizedbycertificate=false 2025-09-25T16:18:30.780058Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1026.v4 host3:1001 2025-09-25T16:18:30.780066Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 3 to 4 2025-09-25T16:18:30.780071Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=4 2025-09-25T16:18:30.791154Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:30.791193Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1026.v4 host3:1001 2025-09-25T16:18:30.791208Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-09-25T16:18:30.791214Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v4 host3:1001 to epoch cache 2025-09-25T16:18:30.791246Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v4 to update nodes log 2025-09-25T16:18:30.791297Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-2" } 2025-09-25T16:18:30.801547Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:554:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:30.801573Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:31.253584Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:695:2250], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.253654Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 } 2025-09-25T16:18:31.253663Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.253673Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:623:2216], seqNo: 2, version: 3, server pipe id: [1:695:2250] 2025-09-25T16:18:31.253687Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:623:2216] 2025-09-25T16:18:31.253714Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:696:2251], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.253746Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.253752Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.253766Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:31.253916Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:698:2253], Recipient [1:554:2186]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.253963Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:31.253969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:31.253981Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:31.254050Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:31.254083Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:627:2219] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:31.254148Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:700:2254], recipient# [1:699:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:31.254169Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:31.254184Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: 
"host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:31.254203Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:699:2186], Recipient [1:554:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:31.254208Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:31.254230Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:31.254235Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host4:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:31.254271Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1027.v5 host4:1001 to database state=Active resolvehost=host4.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=3 authorizedbycertificate=false 2025-09-25T16:18:31.254334Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1027.v5 host4:1001 2025-09-25T16:18:31.254343Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5 2025-09-25T16:18:31.254347Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=5 2025-09-25T16:18:31.265905Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:31.265941Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1027.v5 host4:1001 2025-09-25T16:18:31.265955Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-09-25T16:18:31.265962Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1027.v5 host4:1001 to epoch cache 2025-09-25T16:18:31.266010Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v5 to update nodes log 2025-09-25T16:18:31.266071Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-3" } 2025-09-25T16:18:31.266262Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:704:2258], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.266284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.266291Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.266306Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.5 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:31.276604Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:554:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:31.276641Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:31.276661Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v5 to [1:623:2216] 2025-09-25T16:18:31.276952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:706:2260], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.276995Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.277001Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.277015Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.5 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:31.277097Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039952, Sender [1:623:2216], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 2 } 2025-09-25T16:18:31.277103Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2025-09-25T16:18:31.641907Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.645546Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.645650Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.645693Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.645754Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.650235Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.650312Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.650388Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.650452Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.650586Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.650671Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.650710Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.654588Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:31.654710Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.654773Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.654851Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.654924Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.654984Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655006Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655042Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655131Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655232Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655260Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655355Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655388Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655758Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655782Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655811Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655833Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655856Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655877Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.655904Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.655917Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.655957Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.655984Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.656019Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.656044Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.656134Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.656219Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.656400Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.660091Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.660154Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.660181Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.666897Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.666969Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.666989Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.667741Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668097Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668143Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668223Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668272Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668318Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668385Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.668481Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.670362Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.670611Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.683510Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.684337Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.686881Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.687177Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.709236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:31.709261Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:31.717707Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:31.718275Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:31.718388Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:31.718650Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:31.719471Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:31.719547Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:31.719625Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:31.719641Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:31.719647Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:31.719666Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:31.719900Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:31.719908Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:31.719914Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:31.719920Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:31.719941Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:31.719948Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:31.757276Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:31.757341Z node 1 :NODE_BROKER TRACE: node_brok ... 
:82: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:31.995994Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:31.996081Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:31.996091Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-09-25T16:18:31.996095Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=2 2025-09-25T16:18:32.009236Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:32.009260Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:32.009272Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-09-25T16:18:32.009277Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-09-25T16:18:32.009302Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-09-25T16:18:32.009360Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2025-09-25T16:18:32.009555Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:643:2225], Recipient [1:558:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.009594Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 1 } 2025-09-25T16:18:32.009603Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.009610Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 1, version: 0, server pipe id: [1:643:2225] 2025-09-25T16:18:32.009620Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v2 to [1:627:2216] 2025-09-25T16:18:32.009645Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:644:2226], Recipient [1:558:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.009660Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.009665Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.009676Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:32.009768Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:646:2228], Recipient [1:558:2186]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.009798Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:32.009803Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:32.009814Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:32.009885Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:32.009912Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:631:2219] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:32.009968Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:648:2229], recipient# [1:647:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:32.009985Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:32.010001Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: 
"host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:32.010017Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:647:2186], Recipient [1:558:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.010023Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.010037Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:32.010042Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:32.010067Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:32.010118Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:32.010125Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-09-25T16:18:32.010130Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=3 2025-09-25T16:18:32.026310Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:32.026338Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:32.026351Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-09-25T16:18:32.026358Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-09-25T16:18:32.026386Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-09-25T16:18:32.026446Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-1" } 2025-09-25T16:18:32.026615Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:652:2233], Recipient [1:558:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.026651Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.026657Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.026670Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:32.026731Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039952, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 } 2025-09-25T16:18:32.026740Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest 2025-09-25T16:18:32.026749Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:627:2216] 2025-09-25T16:18:32.026839Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:654:2235], Recipient [1:558:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.026857Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.026862Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.026868Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:32.026905Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039952, Sender [1:627:2216], Recipient [1:558:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest { SeqNo: 1 } 2025-09-25T16:18:32.026910Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:258: StateWork, processing event TEvNodeBroker::TEvSyncNodesRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2025-09-25T16:18:30.747097Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.750636Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.750720Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.750752Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.750814Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.754615Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.754672Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.754734Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.754784Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.754905Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.754971Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.755003Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.758769Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
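Editor's note: the registration trace above ends with "Add node #1024.v2 / #1025.v3 to update nodes log" followed by "Send TEvUpdateNodes v0 -> v2" and "v2 -> v3" to the subscriber. The sketch below is illustrative only — the class and field names (UpdateLog, NodeUpdate) are invented and are not YDB's actual types — but it shows the general idea of a version-keyed update log from which a delta "vX -> vY" can be served to a subscriber that reports its CachedVersion.

// Illustrative sketch (invented names, not the real TNodeBroker code):
// a versioned update log serving "TEvUpdateNodes vX -> vY" style deltas.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct NodeUpdate {            // one entry of the "update nodes log"
    uint64_t nodeId;
    std::string host;
};

class UpdateLog {
public:
    // Record a change under the epoch version that introduced it (e.g. "#1025.v3").
    void Add(uint64_t version, NodeUpdate update) { Log[version] = std::move(update); }

    // Collect everything a subscriber with `cachedVersion` is missing,
    // i.e. all updates with cachedVersion < version <= currentVersion.
    std::vector<NodeUpdate> Delta(uint64_t cachedVersion, uint64_t currentVersion) const {
        std::vector<NodeUpdate> out;
        for (auto it = Log.upper_bound(cachedVersion);
             it != Log.end() && it->first <= currentVersion; ++it) {
            out.push_back(it->second);
        }
        return out;
    }

private:
    std::map<uint64_t, NodeUpdate> Log;  // version -> update, ordered by version
};

int main() {
    UpdateLog log;
    log.Add(2, {1024, "host1"});   // cf. "Add node #1024.v2 to update nodes log"
    log.Add(3, {1025, "host2"});   // cf. "Add node #1025.v3 to update nodes log"

    // A subscriber with CachedVersion 0 receives both updates ("v0 -> v3"),
    // one with CachedVersion 2 only the second ("v2 -> v3").
    for (uint64_t cached : {0u, 2u}) {
        auto delta = log.Delta(cached, 3);
        std::cout << "v" << cached << " -> v3: " << delta.size() << " update(s)\n";
    }
}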
2025-09-25T16:18:30.758887Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.758943Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.758989Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759032Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759062Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759080Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759111Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759187Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759280Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759320Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759414Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759437Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759877Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759932Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.759979Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.760016Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.760053Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.760087Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.760149Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760169Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760218Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760249Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760290Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760319Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760431Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.760517Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.760933Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.761254Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.762403Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.762435Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.762459Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.762482Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.762505Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.771546Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.772442Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.772525Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.773199Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.773228Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.773565Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.773649Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.774078Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.774305Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.774399Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.774503Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.774650Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.775729Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.776150Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.783571Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.783989Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.800575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:30.800604Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:30.805930Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:30.806529Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:30.806602Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:30.806881Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:30.807744Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:30.807887Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:30.807960Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:30.807976Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:30.807982Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:30.808002Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:30.808036Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:30.808042Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:30.808047Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:30.808053Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:30.808074Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:133 ... 
d TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:31.991102Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:788:2320], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991118Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.991122Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.991128Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:31.991180Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:790:2322], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:31.991201Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.991207Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:31.991261Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:792:2324], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991277Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.991282Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.991288Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:31.991346Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:794:2326], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-09-25T16:18:31.991367Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.991373Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.9 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:31.991449Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:796:2328], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991471Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2218], Recipient [1:719:2267]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 2 } 2025-09-25T16:18:31.991478Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.991487Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2218], seqNo: 2, version: 9, server pipe id: [1:796:2328] 2025-09-25T16:18:31.991495Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v9 to [1:631:2218] 2025-09-25T16:18:31.991557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:796:2328], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.991563Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2218], seqNo: 2, server pipe id: [1:796:2328] 2025-09-25T16:18:31.991587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:798:2330], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991604Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 3 } 2025-09-25T16:18:31.991608Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.991613Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2218], seqNo: 3, version: 8, server pipe id: [1:798:2330] 2025-09-25T16:18:31.991618Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v9 to [1:631:2218] 2025-09-25T16:18:31.991667Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:798:2330], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.991672Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2218], seqNo: 3, server pipe id: [1:798:2330] 2025-09-25T16:18:31.991695Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:800:2332], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991713Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 4 } 2025-09-25T16:18:31.991718Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.991722Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2218], seqNo: 4, version: 7, server pipe id: [1:800:2332] 2025-09-25T16:18:31.991727Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v9 to [1:631:2218] 2025-09-25T16:18:31.991773Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:800:2332], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.991778Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2218], seqNo: 4, server pipe id: [1:800:2332] 2025-09-25T16:18:31.991804Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:802:2334], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 
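Editor's note: in this part of the trace each "New subscriber ... server pipe id: [...]" is paired with an "Unsubscribed ..." once the corresponding TEvServerDisconnected arrives. The following sketch is only an illustration under invented names (SubscriberRegistry, Subscriber), not the broker's real bookkeeping; it shows a registry keyed by the server pipe id so that a pipe disconnect drops exactly the matching subscription.

// Illustrative sketch (invented names): subscriber registry keyed by pipe id.
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

struct Subscriber {
    std::string actorId;   // e.g. "[1:631:2218]"
    uint64_t seqNo;        // client-side sequence number of the subscription
    uint64_t version;      // node-list version the client reports as cached
};

class SubscriberRegistry {
public:
    void Subscribe(const std::string& pipeId, Subscriber s) {
        std::cout << "New subscriber " << s.actorId << ", seqNo: " << s.seqNo
                  << ", version: " << s.version << ", server pipe id: " << pipeId << "\n";
        Subscribers[pipeId] = std::move(s);
    }

    void OnPipeDisconnected(const std::string& pipeId) {
        auto it = Subscribers.find(pipeId);
        if (it == Subscribers.end())
            return;                      // unknown pipe: nothing to drop
        std::cout << "Unsubscribed " << it->second.actorId
                  << ", seqNo: " << it->second.seqNo
                  << ", server pipe id: " << pipeId << "\n";
        Subscribers.erase(it);
    }

private:
    std::unordered_map<std::string, Subscriber> Subscribers;  // pipe id -> subscriber
};

int main() {
    SubscriberRegistry registry;
    registry.Subscribe("[1:796:2328]", {"[1:631:2218]", 2, 9});
    registry.OnPipeDisconnected("[1:796:2328]");   // mirrors the connect/disconnect pair in the trace
}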
2025-09-25T16:18:31.991819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 5 } 2025-09-25T16:18:31.991823Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.991827Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2218], seqNo: 5, version: 6, server pipe id: [1:802:2334] 2025-09-25T16:18:31.991831Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v9 to [1:631:2218] 2025-09-25T16:18:31.991877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:802:2334], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.991881Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2218], seqNo: 5, server pipe id: [1:802:2334] 2025-09-25T16:18:31.991907Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:804:2336], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.991920Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 6 } 2025-09-25T16:18:31.991924Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.991929Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2218], seqNo: 6, version: 5, server pipe id: [1:804:2336] 2025-09-25T16:18:31.991934Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v9 to [1:631:2218] 2025-09-25T16:18:31.991992Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:804:2336], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.991997Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2218], seqNo: 6, server pipe id: [1:804:2336] 2025-09-25T16:18:31.992025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:806:2338], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.992046Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:31.992052Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.992098Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800026000 Name: "slot-0" } } 2025-09-25T16:18:31.992177Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:808:2340], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.992196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:631:2218], 
Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:31.992202Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.992219Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800026000 Name: "slot-1" } } 2025-09-25T16:18:31.992275Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:810:2342], Recipient [1:719:2267]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.992295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:631:2218], Recipient [1:719:2267]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-09-25T16:18:31.992300Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.992308Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-09-25T16:18:32.254107Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.260728Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.262511Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.262705Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.284522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.284545Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.292053Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.292545Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.292627Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.292866Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.293493Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.293523Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.293612Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
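Editor's note: just above, TEvResolveNode for node ids 1024 and 1025 is answered with Status OK and the node record, while 1026 gets WRONG_REQUEST with reason "Unknown node". The sketch below is a minimal illustration under invented types (NodeRecord, ResolveReply), not YDB's actual resolve path: a lookup against the current epoch's node map that either returns the record or the "Unknown node" error.

// Illustrative sketch (invented types): resolving a dynamic node id.
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <unordered_map>

struct NodeRecord {
    std::string host;
    uint16_t port;
    uint64_t expire;   // lease end, as in the Expire field of the trace
};

struct ResolveReply {
    bool ok;
    std::string reason;                // set when ok == false
    std::optional<NodeRecord> node;    // set when ok == true
};

ResolveReply ResolveNode(const std::unordered_map<uint64_t, NodeRecord>& epochNodes,
                         uint64_t nodeId) {
    auto it = epochNodes.find(nodeId);
    if (it == epochNodes.end())
        return {false, "Unknown node", std::nullopt};   // cf. WRONG_REQUEST reply for 1026
    return {true, "", it->second};                      // cf. OK replies for 1024 and 1025
}

int main() {
    std::unordered_map<uint64_t, NodeRecord> epochNodes{
        {1024, {"host1", 1001, 10800026000ull}},
        {1025, {"host2", 1001, 10800026000ull}},
    };
    for (uint64_t id : {1024ull, 1026ull}) {
        auto reply = ResolveNode(epochNodes, id);
        std::cout << id << ": "
                  << (reply.ok ? "OK " + reply.node->host
                               : "WRONG_REQUEST (" + reply.reason + ")")
                  << "\n";
    }
}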
2025-09-25T16:18:32.293631Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.293637Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.293657Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.293674Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.293680Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.293685Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.293690Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.293712Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.293719Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.335920Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.335962Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2025-09-25T16:18:32.335973Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:32.335984Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:32.336076Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:195:2195], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.336103Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:195:2195] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-09-25T16:18:32.336755Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-09-25T16:18:32.336780Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.336793Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.336883Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.336914Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:202:2200], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:32.336920Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:32.336932Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:32.336984Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:32.377403Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2380: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-09-25T16:18:32.385766Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-09-25T16:18:32.385896Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2522: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-09-25T16:18:32.385960Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:32.386051Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: 
Send result: self# [1:213:2204], recipient# [1:205:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:32.386066 ... Cache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:32.778370Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:32.778388Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [2:205:2179], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.778394Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.778409Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:32.778414Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:32.778441Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:1001 to database state=Active resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:32.778491Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:32.778499Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 1 to 2 2025-09-25T16:18:32.778504Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=2 2025-09-25T16:18:32.789368Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:32.789388Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v2 host1:1001 2025-09-25T16:18:32.789397Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 1 to 2 2025-09-25T16:18:32.789403Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v2 host1:1001 to epoch cache 2025-09-25T16:18:32.789421Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-09-25T16:18:32.789449Z 
node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2025-09-25T16:18:32.789551Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [2:217:2208], Recipient [2:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.789573Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [2:202:2200], Recipient [2:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-09-25T16:18:32.789579Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:32.789585Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-09-25T16:18:32.789625Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:32.789653Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [2:16:2063], cacheItem# { Subscriber: { Subscriber: [2:206:2203] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:32.789694Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [2:219:2209], recipient# [2:218:2179], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:32.789705Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: 
KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:32.789716Z node 2 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:32.789727Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [2:218:2179], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.789730Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:32.789740Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:32.789743Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:32.789764Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1025.v3 host2:1001 to database state=Active resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:32.789809Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:32.789816Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 2 to 3 2025-09-25T16:18:32.789820Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=3 2025-09-25T16:18:32.800810Z node 2 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:32.800858Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1025.v3 host2:1001 2025-09-25T16:18:32.800872Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 2 to 3 2025-09-25T16:18:32.800878Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1025.v3 host2:1001 to epoch cache 2025-09-25T16:18:32.800909Z node 2 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v3 to update nodes log 2025-09-25T16:18:32.800976Z node 2 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } ... 
waiting for cache miss 2025-09-25T16:18:32.801081Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:585: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 2.029024s } 2025-09-25T16:18:32.801097Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:686: New cache miss: nodeId# 1024, deadline# 2.029024s 2025-09-25T16:18:32.801101Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:690: Schedule wakeup for new earliest deadline 2.029024s 2025-09-25T16:18:32.801112Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:585: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.029024s } 2025-09-25T16:18:32.801116Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:686: New cache miss: nodeId# 1025, deadline# 1.029024s 2025-09-25T16:18:32.801120Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:690: Schedule wakeup for new earliest deadline 1.029024s ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) 2025-09-25T16:18:32.811319Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:32.811362Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:32.811378Z node 2 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v3 to [2:18:2065] ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-09-25T16:18:32.883183Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:935: HandleWakeup at 1.030024s 2025-09-25T16:18:32.883222Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:137: Cache miss failed: nodeId=1025, error=Deadline exceeded 2025-09-25T16:18:32.893428Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [2:172:2179]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:32.893455Z node 2 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-09-25T16:18:32.934363Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:935: HandleWakeup at 2.030024s 2025-09-25T16:18:32.934404Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:137: Cache miss failed: nodeId=1024, error=Deadline exceeded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-09-25T16:18:32.554586Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:121:2155] Bootstrap 2025-09-25T16:18:32.554832Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:121:2155] Become StateWork (SchemeCache [1:128:2161]) 2025-09-25T16:18:32.561318Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.564999Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.565075Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-09-25T16:18:32.565522Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.565701Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} 
StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:18:32.565790Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:32.565799Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:820} Handle TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:32.565850Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-09-25T16:18:32.571840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-09-25T16:18:32.571910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-09-25T16:18:32.571957Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-09-25T16:18:32.571978Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:32.571992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:32.572139Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:32.597973Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:121:2155] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:32.598981Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-09-25T16:18:32.623616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:32.623673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:32.634833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:32.634894Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:32.634913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:32.634928Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:32.634960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:32.634972Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:32.634982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:32.634990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute 
tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:32.646097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:32.646154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:32.657062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:32.657131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-09-25T16:18:32.657363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-09-25T16:18:32.657373Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2214} LoadFinished 2025-09-25T16:18:32.659102Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-09-25T16:18:32.659118Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-09-25T16:18:32.659398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-09-25T16:18:32.659415Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-09-25T16:18:32.659610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:410} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/endf/00478b/r3tmp/tmpCKNXDy/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2025-09-25T16:18:32.659662Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/endf/00478b/r3tmp/tmpCKNXDy/pdisk_1.dat 2025-09-25T16:18:32.659865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-09-25T16:18:32.659884Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:32.659895Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-09-25T16:18:32.659926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-09-25T16:18:32.660020Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } 
NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-09-25T16:18:32.660334Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-09-25T16:18:32.660367Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:32.671371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-09-25T16:18:32.671621Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:32.671709Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:32.671722Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:32.673538Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-09-25T16:18:32.673604Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:32.673711Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:32.673725Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:32.673730Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:32.673750Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:342:2312] 2025-09-25T16:18:32.674176Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-09-25T16:18:32.674185Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:336:2308] 2025-09-25T16:18:32.674321Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:342:2312]} 2025-09-25T16:18:32.674330Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:32.674339Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:32.674343Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:32.688965Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-09-25T16:18:32.700083Z node 1 :LOCAL DEBUG: local.cpp:1256: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-09-25T16:18:32.700101Z node 1 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-09-25T16:18:32.707355Z node 1 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-09-25T16:18:32.707410Z node 1 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 
1 Network: 1) 2025-09-25T16:18:32.707557Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:32.707565Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:32.707582Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:399:2349] 2025-09-25T16:18:32.707963Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:399:2349]} 2025-09-25T16:18:32.707993Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:32.708001Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:32.708005Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:32.710449Z node 1 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-09-25T16:18:32.710534Z node 1 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-09-25T16:18:32.710555Z node 1 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:32.710634Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:32.710640Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:32.710653Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2369] 2025-09-25T16:18:32.710863Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2369]} 2025-09-25T16:18:32.710872Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:32.710880Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:32.710884Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:32.710966Z node 1 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-unknown to schemeshard 72057594046578944 2025-09-25T16:18:32.710986Z node 1 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusPathDoesNotExist Path: "/dc-1/users/tenant-unknown" 2025-09-25T16:18:32.710993Z node 1 :LOCAL ERROR: local.cpp:1299: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-09-25T16:18:32.711009Z node 1 :LOCAL ERROR: local.cpp:1549: Unknown domain dc-3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] Test command err: 2025-09-25T16:18:08.121481Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:08.126616Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: 
"SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:08.126738Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:08.126988Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:08.127083Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:08.127286Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:08.127296Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:08.127461Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2025-09-25T16:18:08.127466Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:08.127490Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:08.127522Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:08.131104Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:08.131124Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:08.131481Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131511Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131538Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131562Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131589Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131613Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131636Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.131641Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:08.131654Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 
2025-09-25T16:18:08.131660Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2025-09-25T16:18:08.131668Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:08.131678Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:08.131870Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:08.131887Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:08.132916Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:08.132966Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:08.133088Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:08.133151Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:08.133304Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2025-09-25T16:18:08.133310Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:08.133324Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:08.133356Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:08.134559Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:08.135950Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:08.135964Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:08.136275Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136304Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136326Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136351Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136374Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136396Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] 
targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136419Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.136424Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:08.136439Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2025-09-25T16:18:08.136444Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2025-09-25T16:18:08.136452Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:08.136461Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:08.136532Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:08.136618Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2025-09-25T16:18:08.136631Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.136664Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:08.136685Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.136806Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2025-09-25T16:18:08.136813Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.136817Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:08.136856Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.140119Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.140136Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:08.141134Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:08.141199Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:08.141582Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:08.141607Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:08.141661Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.141668Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:08.141684Z node 2 
:BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:08.141712Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:08.142333Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:08.142355Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:08.142364Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:08.142369Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:08.142400Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:100:2090] 2025-09-25T16:18:08.142472Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup ... -25T16:18:31.234338Z node 24 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-09-25T16:18:31.234345Z node 24 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:31.234354Z node 24 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:31.234360Z node 24 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:31.234368Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2025-09-25T16:18:31.234391Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [24:326:2305] 2025-09-25T16:18:31.234396Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [24:326:2305] 2025-09-25T16:18:31.234428Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [24:329:2307] 2025-09-25T16:18:31.234432Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [24:329:2307] 2025-09-25T16:18:31.234438Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [24:329:2307] 2025-09-25T16:18:31.234445Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [24:275:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:31.234453Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [24:275:2266] 2025-09-25T16:18:31.234460Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [24:329:2307] 2025-09-25T16:18:31.234466Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] 
forward result local node, try to connect [24:329:2307] 2025-09-25T16:18:31.234473Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [24:329:2307] 2025-09-25T16:18:31.234484Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [24:329:2307] 2025-09-25T16:18:31.234507Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [24:329:2307] 2025-09-25T16:18:31.234511Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [24:329:2307] 2025-09-25T16:18:31.234515Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [24:329:2307] 2025-09-25T16:18:31.234519Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [24:329:2307] 2025-09-25T16:18:31.234523Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [24:329:2307] 2025-09-25T16:18:31.234536Z node 24 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([24:329:2307]) [24:330:2308] 2025-09-25T16:18:31.234546Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [24:328:2306] EventType# 268697624 2025-09-25T16:18:31.234565Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-09-25T16:18:31.234572Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:18:31.234580Z node 24 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037927937 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-09-25T16:18:31.234627Z node 24 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037927937 THive::TTxStartTablet::Execute, Sending TEvBootTablet(Dummy.72075186224037888.Leader.1) to node 24 storage {Version# 1 TabletID# 72075186224037888 TabletType# Dummy Channels# {0:{Channel# 0 Type# none StoragePool# def1 History# {0:{FromGeneration# 0 GroupID# 2147483648 Timestamp# 1970-01-01T00:00:00.057536Z}}, 1:{Channel# 1 Type# none StoragePool# def2 History# {0:{FromGeneration# 0 GroupID# 2147483649 Timestamp# 1970-01-01T00:00:00.057536Z}}, 2:{Channel# 2 Type# none StoragePool# def3 History# {0:{FromGeneration# 0 GroupID# 2147483650 Timestamp# 1970-01-01T00:00:00.057536Z}}} Tenant: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:31.234653Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2025-09-25T16:18:31.234661Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:31.245057Z node 24 :BS_PROXY_PUT INFO: dsproxy_put.cpp:650: [aeed6b7f2709b4c0] bootstrap ActorId# [24:332:2310] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:126:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-09-25T16:18:31.245113Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [aeed6b7f2709b4c0] Id# [72057594037927937:2:5:0:0:126:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-09-25T16:18:31.245122Z node 24 
:BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:5:0:0:126:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:18:31.245132Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG33 2025-09-25T16:18:31.245139Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG32 2025-09-25T16:18:31.245172Z node 24 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [24:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:126:1] FDS# 126 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:18:31.245560Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:126:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80992 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:18:31.245587Z node 24 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:18:31.245596Z node 24 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:31.245620Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.172 sample PartId# [72057594037927937:2:5:0:0:126:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 0.568 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-09-25T16:18:31.245651Z node 24 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:31.245685Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2025-09-25T16:18:31.245744Z node 24 :HIVE DEBUG: tx__start_tablet.cpp:122: HIVE#72057594037927937 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [24:328:2306] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483648 } StoragePool: "def1" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483649 } StoragePool: "def2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483650 } StoragePool: "def3" } TabletType: Dummy Version: 1 TenantIdOwner: 72057594046678944 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-09-25T16:18:31.245816Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [24:334:2312] 2025-09-25T16:18:31.245823Z node 24 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [24:334:2312] 2025-09-25T16:18:31.245832Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [24:334:2312] 2025-09-25T16:18:31.245846Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [24:275:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:31.245855Z node 24 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 24 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [24:275:2266] 2025-09-25T16:18:31.245864Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [24:334:2312] 2025-09-25T16:18:31.245872Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [24:334:2312] 2025-09-25T16:18:31.245878Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [24:334:2312] 2025-09-25T16:18:31.245891Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [24:334:2312] 2025-09-25T16:18:31.245919Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [24:334:2312] 2025-09-25T16:18:31.245925Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [24:334:2312] 2025-09-25T16:18:31.245929Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [24:334:2312] 2025-09-25T16:18:31.245935Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [24:334:2312] 2025-09-25T16:18:31.245939Z node 24 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [24:334:2312] 2025-09-25T16:18:31.245948Z node 24 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [24:333:2311] EventType# 268830214 2025-09-25T16:18:31.245962Z node 24 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([24:334:2312]) [24:335:2313] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2025-09-25T16:18:31.087253Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.090152Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.090223Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.090247Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.090285Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.094284Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:31.094338Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.094389Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.094429Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.094515Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.094569Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.094595Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.098968Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099095Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099153Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099254Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099321Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099362Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099383Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099416Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099509Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099614Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099643Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099747Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.099780Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100267Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100301Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100338Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100362Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100388Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100414Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100456Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:31.100472Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.100512Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.100539Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.100576Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.100603Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.100692Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.100738Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.101010Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.101355Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.102283Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.102319Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.102347Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.102365Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.110938Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.111526Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.111567Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.111606Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.111623Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.111637Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.112445Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.112486Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.112644Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.112664Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.112788Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.115187Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.115962Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.148473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:31.148503Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:31.156661Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:31.157326Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:31.157421Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:31.157859Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:31.159351Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:31.159614Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:31.159689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:31.159707Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.159712Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:31.159733Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:31.159785Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:31.159791Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:31.159796Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:31.159802Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.159838Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:31.159844Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:31.204298Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:31.204342Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:31.204355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:31.204366Z ... 
eserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:721:2270] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543672Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:726:2275] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543700Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:727:2276] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543721Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:722:2271] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543733Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:723:2272] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543746Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:724:2273] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543758Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:725:2274] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.543771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:724:2273], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.543803Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:725:2274], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.543826Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:726:2275], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.543852Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:727:2276], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.543914Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:31.543921Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.543928Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.543964Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:727:2276] 2025-09-25T16:18:31.543969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.543974Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.543994Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:721:2270] 2025-09-25T16:18:31.543998Z node 
1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544002Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544009Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:725:2274] 2025-09-25T16:18:31.544014Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544018Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:726:2275] 2025-09-25T16:18:31.544029Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544033Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544040Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:722:2271] 2025-09-25T16:18:31.544044Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544047Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544054Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:723:2272] 2025-09-25T16:18:31.544058Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544062Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544070Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:724:2273] 2025-09-25T16:18:31.544074Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544078Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:31.544164Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:732:2281], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544190Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.544195Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544205Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.544278Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:734:2283], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544297Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.544301Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, 
processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544307Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.544378Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:736:2285], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544394Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.544399Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544405Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.544461Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:738:2287], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544483Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2025-09-25T16:18:31.544487Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.544493Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.4 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:31.544550Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:740:2289], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544573Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 2 } 2025-09-25T16:18:31.544578Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.544586Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 2, version: 3, server pipe id: [1:740:2289] 2025-09-25T16:18:31.544594Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v4 to [1:627:2218] 2025-09-25T16:18:31.544663Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:740:2289], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.544669Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 2, server pipe id: [1:740:2289] 2025-09-25T16:18:31.544700Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:742:2291], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544722Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:31.544728Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event 
TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.544760Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200027000 Name: "slot-0" } } 2025-09-25T16:18:31.544850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:744:2293], Recipient [1:681:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.544868Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2218], Recipient [1:681:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:31.544873Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.544888Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200027000 Name: "slot-1" } } |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2025-09-25T16:18:30.549339Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.553176Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.553282Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.553326Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.553387Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.557651Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.557728Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.557801Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.557870Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.558015Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.558105Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.558155Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.570279Z node 
2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570418Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570480Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570539Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570607Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570656Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570677Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570715Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570814Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570898Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.570917Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571001Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571025Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571375Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571401Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571421Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571437Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571454Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571472Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571510Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571524Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571557Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571581Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571615Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571641Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.571738Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:30.571800Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.572051Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.572399Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.573948Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.573975Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.573991Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.574006Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.574023Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.579880Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.579976Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.580500Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.580556Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.580602Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581068Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581119Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581445Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581731Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581953Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.581981Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.582262Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.582879Z node 6 
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.583074Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.583186Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.583566Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.585391Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.585854Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:30.599478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:30.599501Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:30.603624Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:30.604085Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:30.604160Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:30.604382Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:30.604781Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:30.604807Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:30.604884Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:30.604904Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:30.604912Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:30.604932Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:30.604954Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:30.604960Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:30.604966Z node 1 :NODE_BROKER DEB ... 
abletPipe::TEvServerConnected 2025-09-25T16:18:31.127541Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:740:2283], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.127562Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:743:2286] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127574Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:744:2287] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127586Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:738:2281] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127597Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:739:2282] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127607Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:740:2283] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127638Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:741:2284], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.127646Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:741:2284] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127657Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:742:2285] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.127677Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:742:2285], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.127705Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:743:2286], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.127740Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:744:2287], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.127810Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:31.127819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.127830Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.127885Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:741:2284] 2025-09-25T16:18:31.127889Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.127897Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.127909Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:744:2287] 2025-09-25T16:18:31.127913Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.127919Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.127937Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:738:2281] 2025-09-25T16:18:31.127941Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.127947Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.127970Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:739:2282] 2025-09-25T16:18:31.127974Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.127980Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128037Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:740:2283] 2025-09-25T16:18:31.128042Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128049Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128058Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:742:2285] 2025-09-25T16:18:31.128062Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128068Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128080Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:743:2286] 2025-09-25T16:18:31.128084Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128090Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128217Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:749:2292], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128246Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, 
Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.128251Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128258Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128327Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:751:2294], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128345Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.128349Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128356Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128420Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:753:2296], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128436Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.128441Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128447Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128510Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:755:2298], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:31.128536Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.128543Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:31.128605Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:757:2300], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128623Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-09-25T16:18:31.128629Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.128637Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:629:2214], seqNo: 2, version: 6, server pipe id: [1:757:2300] 2025-09-25T16:18:31.128646Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send 
TEvUpdateNodes v6 -> v7 to [1:629:2214] 2025-09-25T16:18:31.128713Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:757:2300], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.128722Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:629:2214], seqNo: 2, server pipe id: [1:757:2300] 2025-09-25T16:18:31.128753Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:759:2302], Recipient [1:697:2255]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.128775Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:629:2214], Recipient [1:697:2255]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:31.128781Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.128810Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000024000 Name: "slot-0" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] Test command err: 2025-09-25T16:18:29.291062Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.294469Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.294553Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.294585Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.294635Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.298749Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.298831Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.298897Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.298952Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.299070Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.299145Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.299183Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.309881Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310017Z node 
3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310078Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310141Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310211Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310256Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310275Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310308Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310399Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310498Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310526Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310623Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.310647Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311062Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311093Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311121Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311142Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311164Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311185Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311222Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311236Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311319Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311347Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311385Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311412Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311519Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:29.311585Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.311850Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.312103Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.313158Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.313211Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.313237Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.313265Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.321899Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.322643Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.322666Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.322947Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.323412Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325663Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325786Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325847Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325893Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325949Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.325975Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.327049Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.327564Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.327871Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 
18446744073709.551615s } 2025-09-25T16:18:29.328067Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.328375Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.328576Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.334638Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.335023Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:29.354296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:29.354324Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:29.360382Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:29.360912Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:29.360988Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:29.361204Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:29.362112Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:29.362148Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:29.362227Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:29.362246Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:29.362252Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:29.362269Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:29.362297Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:29.362303Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:29.362308Z node 1 :NODE_BROKER DEB ... 
verConnected 2025-09-25T16:18:31.183767Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:800:2310] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.183895Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 5 } 2025-09-25T16:18:31.183902Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.183909Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.183961Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:801:2311], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184039Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:802:2312] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184052Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:803:2313] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184062Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:804:2314] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184077Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:805:2315] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184087Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:806:2316] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184097Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:801:2311] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184112Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:802:2312], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184131Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:803:2313], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184139Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:807:2317] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:31.184199Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:804:2314], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184233Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:805:2315], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184299Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:806:2316], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184310Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:807:2317], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184326Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:801:2311] 2025-09-25T16:18:31.184330Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184336Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184348Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:802:2312] 2025-09-25T16:18:31.184352Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184356Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184363Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:803:2313] 2025-09-25T16:18:31.184369Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184373Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184381Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:804:2314] 2025-09-25T16:18:31.184385Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184389Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184396Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:805:2315] 2025-09-25T16:18:31.184400Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184404Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184412Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:806:2316] 2025-09-25T16:18:31.184416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184420Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:807:2317] 2025-09-25T16:18:31.184431Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184435Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:31.184514Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:812:2322], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184537Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 
272039936, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.184542Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184552Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:31.184627Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:814:2324], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184640Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.184644Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184650Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:31.184709Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:816:2326], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.184726Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:31.184730Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.184738Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:31.185661Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:818:2328], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.185695Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:31.185699Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:31.185707Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.7 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-09-25T16:18:31.185772Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:820:2330], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.185788Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-09-25T16:18:31.185793Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:31.185800Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:631:2216], seqNo: 2, version: 6, server pipe id: [1:820:2330] 2025-09-25T16:18:31.185807Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:730: Send TEvUpdateNodes v6 -> v7 to [1:631:2216] 2025-09-25T16:18:31.185855Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:820:2330], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:31.185859Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:631:2216], seqNo: 2, server pipe id: [1:820:2330] 2025-09-25T16:18:31.185878Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:822:2332], Recipient [1:761:2285]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:31.185892Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:631:2216], Recipient [1:761:2285]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:31.185896Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:31.185924Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } |81.4%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2025-09-25T16:18:32.406649Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.409375Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.409444Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.409473Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.409517Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.412683Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.412722Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.412768Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.412805Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.412951Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.413019Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.413047Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:32.416958Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417088Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417151Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417213Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417284Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417329Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417351Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417383Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417486Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417602Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417634Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417745Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.417778Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418187Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418235Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418272Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418297Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418320Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418340Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418366Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418379Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418422Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418449Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418491Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418515Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418616Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.418683Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.418943Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.419450Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.419477Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.421971Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.422012Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.422039Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.429450Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.429486Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.430175Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.430878Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.430932Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431181Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431568Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431716Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431785Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431846Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.431952Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.432999Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.433110Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 
2025-09-25T16:18:32.433376Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.437081Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.437743Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.438061Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.464517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.464538Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.469163Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.469604Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.469689Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.469864Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.470815Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.470862Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.470930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:32.470947Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.470953Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.470971Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.471004Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.471009Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.471013Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.471017Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.471031Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:133 ... 
rverConnected 2025-09-25T16:18:32.787095Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:710:2256] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787215Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:711:2257], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787243Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:712:2258], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787284Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:715:2261] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787295Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:716:2262] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787314Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:713:2259], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787336Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:717:2263] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787347Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:712:2258] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787355Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:713:2259] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787361Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:714:2260], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787366Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:711:2257] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787373Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:714:2260] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:32.787386Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:715:2261], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787406Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:716:2262], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787435Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:717:2263], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787496Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], 
Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:32.787502Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787507Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787529Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:712:2258] 2025-09-25T16:18:32.787533Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787538Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:711:2257] 2025-09-25T16:18:32.787557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787561Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787567Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:714:2260] 2025-09-25T16:18:32.787571Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787575Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787581Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:715:2261] 2025-09-25T16:18:32.787585Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787589Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787597Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:716:2262] 2025-09-25T16:18:32.787601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787605Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787612Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:713:2259] 2025-09-25T16:18:32.787616Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787620Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787627Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:717:2263] 2025-09-25T16:18:32.787630Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787634Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #2 2025-09-25T16:18:32.787711Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:722:2268], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787741Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, 
Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.787746Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787756Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.787820Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:724:2270], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787836Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.787840Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787847Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.787901Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:726:2272], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787918Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.787922Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.787928Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.787979Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:728:2274], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.787993Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 2 } 2025-09-25T16:18:32.787997Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.788004Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.788055Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:730:2276], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.788074Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 2 SeqNo: 2 } 2025-09-25T16:18:32.788079Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.788085Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2214], seqNo: 2, version: 2, server pipe id: [1:730:2276] 2025-09-25T16:18:32.788091Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send 
TEvUpdateNodes v2 -> v3 to [1:627:2214] 2025-09-25T16:18:32.788152Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:730:2276], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:32.788157Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2214], seqNo: 2, server pipe id: [1:730:2276] 2025-09-25T16:18:32.788178Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:732:2278], Recipient [1:671:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.788193Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2214], Recipient [1:671:2231]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:32.788196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:32.788218Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD]
Test command err: 2025-09-25T16:18:31.186593Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.190196Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.190282Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.190313Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.190362Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.194587Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.194650Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.194716Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.194766Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.194898Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.194971Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.195006Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.200440Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200565Z node 3
:NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200633Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200704Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200785Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200849Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200871Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200913Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.200998Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201094Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201118Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201200Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201226Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201604Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201656Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201694Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201721Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201747Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201771Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.201802Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.201817Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.201863Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.201891Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.201925Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.201964Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.202052Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.202120Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.202386Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.203887Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.203924Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.203965Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.210426Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.210451Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211089Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211372Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211773Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211816Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211853Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211893Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.211940Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.212052Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.212085Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.213929Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.214090Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.216911Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.217118Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.217676Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 
18446744073709.551615s } 2025-09-25T16:18:31.217958Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.244129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:31.244155Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:31.255432Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:31.256202Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:31.256334Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:31.256915Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:31.260660Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:31.260730Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:31.261011Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:31.261031Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:31.261036Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:31.261058Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:31.261101Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:31.261107Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:31.261112Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:31.261118Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:31.261148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:31.261155Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:31.294851Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:31.294892Z node 1 :NODE_BROKER TRACE: node_brok ... 
erverConnected 2025-09-25T16:18:32.803948Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.803952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.803958Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804027Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:807:2331], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804043Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 10 } 2025-09-25T16:18:32.804048Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.804055Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804111Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:809:2333], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804148Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.804153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.804160Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804223Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:811:2335], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804245Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-09-25T16:18:32.804252Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.804259Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804323Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:813:2337], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804342Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:32.804347Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.804353Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z 
- 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804414Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:815:2339], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804436Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-09-25T16:18:32.804442Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:32.804449Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.11 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:32.804532Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:817:2341], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804557Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 11 SeqNo: 2 } 2025-09-25T16:18:32.804563Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.804572Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 2, version: 11, server pipe id: [1:817:2341] 2025-09-25T16:18:32.804582Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v11 -> v11 to [1:627:2216] 2025-09-25T16:18:32.804646Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:817:2341], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:32.804654Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 2, server pipe id: [1:817:2341] 2025-09-25T16:18:32.804699Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:819:2343], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804714Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 3 } 2025-09-25T16:18:32.804718Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.804723Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 3, version: 10, server pipe id: [1:819:2343] 2025-09-25T16:18:32.804729Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v11 to [1:627:2216] 2025-09-25T16:18:32.804787Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:819:2343], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:32.804793Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 3, server pipe id: [1:819:2343] 2025-09-25T16:18:32.804818Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:821:2345], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804859Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 4 } 2025-09-25T16:18:32.804864Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.804869Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 4, version: 9, server pipe id: [1:821:2345] 2025-09-25T16:18:32.804874Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v11 to [1:627:2216] 2025-09-25T16:18:32.804931Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:821:2345], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:32.804936Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 4, server pipe id: [1:821:2345] 2025-09-25T16:18:32.804966Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:823:2347], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.804979Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 5 } 2025-09-25T16:18:32.804983Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:32.804988Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 5, version: 8, server pipe id: [1:823:2347] 2025-09-25T16:18:32.804993Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v11 to [1:627:2216] 2025-09-25T16:18:32.805047Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:823:2347], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:32.805053Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 5, server pipe id: [1:823:2347] 2025-09-25T16:18:32.805082Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:825:2349], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.805099Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:32.805105Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:32.805140Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 14400025000 Name: "slot-0" } } 2025-09-25T16:18:32.805209Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:827:2351], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.805227Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2216], Recipient [1:746:2284]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-09-25T16:18:32.805236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:32.805254Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 14400025000 Name: "slot-1" } } 2025-09-25T16:18:32.805312Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:829:2353], Recipient [1:746:2284]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.805330Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2216], Recipient [1:746:2284]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1026 } 2025-09-25T16:18:32.805334Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:32.805343Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } }
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD]
Test command err: 2025-09-25T16:18:31.991216Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.994528Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.994626Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.994671Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.994727Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.999122Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999172Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999237Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999297Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.999404Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.999466Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.999497Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.003620Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes
2025-09-25T16:18:32.003731Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.003792Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.003849Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.003913Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.003949Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.003969Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004002Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004086Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004180Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004202Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004284Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004306Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004656Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004691Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004722Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004742Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004764Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004783Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.004819Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.004848Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.004890Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.004921Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.004959Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.004987Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.005078Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.005144Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.005382Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.005653Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.005866Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.005887Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008450Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008490Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.015463Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.015528Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.015549Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.015587Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.016259Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.016307Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.017013Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.017070Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.017136Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.017202Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.017309Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.018463Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.018733Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.018885Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.019164Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.042313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.042338Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.047618Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.048123Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.048195Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.048421Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.048977Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.049002Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.049065Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:32.049081Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.049087Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.049103Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.049130Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.049137Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.049143Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.049149Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.049167Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.049174Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.096124Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.096178Z node 1 :NODE_BROKER TRACE: node_brok ... 
8:33.503045Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:771:2297], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503114Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:772:2298], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503123Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:770:2296] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503135Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:771:2297] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503146Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:772:2298] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503157Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:773:2299] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503172Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:774:2300] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503212Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:775:2301] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503223Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:773:2299], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:774:2300], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503263Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:776:2302] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.503281Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:775:2301], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503325Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:776:2302], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503391Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-09-25T16:18:33.503399Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503411Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503448Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:771:2297] 2025-09-25T16:18:33.503453Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503460Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503476Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:773:2299] 2025-09-25T16:18:33.503480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503486Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503508Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:774:2300] 2025-09-25T16:18:33.503511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503517Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503545Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:770:2296] 2025-09-25T16:18:33.503549Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503558Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503567Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:772:2298] 2025-09-25T16:18:33.503571Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503578Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503589Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:775:2301] 2025-09-25T16:18:33.503593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503600Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503620Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:776:2302] 2025-09-25T16:18:33.503624Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503631Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503749Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:781:2307], Recipient 
[1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503775Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.503780Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503787Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:783:2309], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503870Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.503874Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503880Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.503935Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:785:2311], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.503949Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.503952Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.503959Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.504014Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:787:2313], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.504032Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-09-25T16:18:33.504036Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.504041Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:33.504102Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:789:2315], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.504124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 2 } 2025-09-25T16:18:33.504130Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:33.504137Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:625:2214], seqNo: 2, version: 5, server pipe id: [1:789:2315] 2025-09-25T16:18:33.504146Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v5 to [1:625:2214] 2025-09-25T16:18:33.504213Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:789:2315], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:33.504221Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:625:2214], seqNo: 2, server pipe id: [1:789:2315] 2025-09-25T16:18:33.504258Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:791:2317], Recipient [1:730:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.504278Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:625:2214], Recipient [1:730:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:33.504284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:33.504304Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-09-25T16:18:31.979534Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.983173Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.983261Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.983297Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.983347Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.987948Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.988010Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.988083Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.988141Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.988252Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.988332Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.988374Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.996038Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:31.996194Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996271Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996349Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996438Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996482Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996506Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996551Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996662Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996799Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996858Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996988Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997030Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997523Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997560Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997651Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997681Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997711Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997740Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997786Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.997801Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.997839Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.997869Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.997909Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.997937Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998030Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998106Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998419Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998691Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999733Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999769Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999816Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.999844Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.007514Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.007621Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008426Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008492Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008523Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008561Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009469Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009678Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009735Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009756Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009933Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.010062Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.010085Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.011255Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.011711Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.034454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.034483Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.040180Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.040690Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.040764Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.041010Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.041811Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.042015Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.042087Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:32.042104Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.042110Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.042128Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.042165Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.042172Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.042177Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.042183Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.042202Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.042208Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.086793Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.086857Z node 1 :NODE_BROKER TRACE: node_brok ... 
1T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.089124Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #4.5 2025-09-25T16:18:33.367141Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367207Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367247Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367276Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367304Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367330Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367360Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.367453Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.410451Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:33.410503Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:33.410517Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-09-25T16:18:33.410531Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z nodes=0 expired=0 removed=1 2025-09-25T16:18:33.410539Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-09-25T16:18:33.410718Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:817:2313], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.410757Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:817:2313] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.410940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:818:2314], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.410966Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:819:2315], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411023Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:822:2318] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411037Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:819:2315] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411078Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, 
Sender [1:820:2316], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411093Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:821:2317] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411125Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:818:2314] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411138Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:823:2319] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411154Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:824:2320] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411166Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:821:2317], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411175Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:820:2316] Leader: 1 Dead: 0 Generation: 4 VersionInfo:  } 2025-09-25T16:18:33.411207Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:822:2318], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:823:2319], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411251Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:824:2320], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411319Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:763:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-09-25T16:18:33.411327Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411337Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411415Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:819:2315] 2025-09-25T16:18:33.411421Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411429Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411438Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:820:2316] 2025-09-25T16:18:33.411442Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411449Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411461Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:821:2317] 2025-09-25T16:18:33.411466Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411473Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411506Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:822:2318] 2025-09-25T16:18:33.411511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411519Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411563Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:824:2320] 2025-09-25T16:18:33.411570Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411577Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:818:2314] 2025-09-25T16:18:33.411591Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411598Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:823:2319] 2025-09-25T16:18:33.411614Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411621Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411751Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:829:2325], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411778Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:763:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.411784Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411792Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:831:2327], Recipient [1:763:2282]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411870Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:629:2218], Recipient [1:763:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.411875Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.411881Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:33.411940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:833:2329], Recipient [1:763:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.411963Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:629:2218], Recipient [1:763:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:33.411969Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:33.411989Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] Test command err: 2025-09-25T16:18:33.205156Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.208931Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.209014Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.209049Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.209095Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.213738Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.213801Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.213872Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.213927Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.214046Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.214121Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.214157Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.220114Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220240Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220299Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220369Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220437Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220482Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220509Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220547Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220645Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220744Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220771Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220894Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.220931Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221304Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221325Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221352Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221374Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221397Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221415Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.221445Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221455Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221482Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221500Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221527Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221556Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221631Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:33.221683Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.221817Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.222103Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.224716Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.224751Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.224785Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.230251Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.231524Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.231685Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.231721Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232476Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232552Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232731Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232790Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232849Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.232937Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.233070Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.233224Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.235366Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.235579Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.237503Z node 5 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.238164Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.260179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:33.260206Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:33.266373Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:33.266966Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:33.267060Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:33.267317Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:33.268428Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:33.268628Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:33.268716Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:33.268734Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:33.268740Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:33.268761Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:33.268810Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:33.268816Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:33.268845Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:33.268852Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-09-25T16:18:33.268874Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:33.268881Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:33.302756Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:33.302817Z node 1 :NODE_BROKER TRACE: node_brok ... 
8:33.660753Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:723:2270], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.660771Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:725:2272] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660784Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:724:2271], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.660791Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:726:2273] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660804Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:727:2274] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660815Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:728:2275] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660844Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:722:2269] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660856Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:723:2270] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660867Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:724:2271] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:33.660910Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:725:2272], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.660934Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:726:2273], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.660945Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:727:2274], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.660980Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:728:2275], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661026Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:33.661032Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661040Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661065Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:725:2272] 2025-09-25T16:18:33.661067Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661072Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661092Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:726:2273] 2025-09-25T16:18:33.661096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661104Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661113Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:727:2274] 2025-09-25T16:18:33.661117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661124Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661135Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:728:2275] 2025-09-25T16:18:33.661138Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661142Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661154Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:722:2269] 2025-09-25T16:18:33.661156Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661160Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661180Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:723:2270] 2025-09-25T16:18:33.661212Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661219Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661252Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:724:2271] 2025-09-25T16:18:33.661256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661265Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661387Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:733:2280], Recipient 
[1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661405Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.661410Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661416Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661480Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:735:2282], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661498Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.661503Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661508Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:737:2284], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:33.661605Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661612Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661661Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:739:2286], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661680Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-09-25T16:18:33.661685Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:33.661691Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z - 1970-01-01T05:00:00.024000Z 2025-09-25T16:18:33.661761Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:741:2288], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661785Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 2 } 2025-09-25T16:18:33.661792Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:33.661801Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 2, version: 5, server pipe id: [1:741:2288] 2025-09-25T16:18:33.661812Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v5 to [1:627:2216] 2025-09-25T16:18:33.661882Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:741:2288], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:33.661898Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 2, server pipe id: [1:741:2288] 2025-09-25T16:18:33.661928Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:743:2290], Recipient [1:682:2243]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:33.661950Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2216], Recipient [1:682:2243]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:33.661955Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:33.661974Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> YdbTableBulkUpsert::DecimalPK [GOOD] >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] |81.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |81.4%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-09-25T16:18:32.734776Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.738298Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.738383Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.738416Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.738462Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.742616Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.742666Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.742720Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.742758Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.742867Z node 7 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.742945Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.742982Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.747150Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747268Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747348Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747400Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747459Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747503Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747521Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747553Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747633Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747729Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747748Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747844Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.747874Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748193Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748221Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748248Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748265Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748284Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748302Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748329Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748339Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748362Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748379Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748404Z 
node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748422Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748494Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.748538Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748729Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.748971Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.749793Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.749829Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.749849Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.755037Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.755842Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.755889Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.755907Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.755917Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756579Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756708Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756778Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756804Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756843Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.756938Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.757023Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 
Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.758811Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.759293Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.779105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.779131Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.785361Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.785842Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.785915Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.786182Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.786867Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.786918Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.787001Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:32.787019Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.787025Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.787045Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.787072Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.787079Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.787084Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.787090Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:32.787109Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.787116Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.820548Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.820592Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2025-09-25T16:18:32.820605Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:32.820616Z ... 
er_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:585:2202] 2025-09-25T16:18:34.052349Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.052356Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.052404Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:586:2203] 2025-09-25T16:18:34.052410Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.052414Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.052427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:587:2204] 2025-09-25T16:18:34.052430Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.052433Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.052451Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:588:2205] 2025-09-25T16:18:34.052453Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.052457Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.052485Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:589:2206] 2025-09-25T16:18:34.052487Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.052491Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.083424Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2025-09-25T16:18:34.083451Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083457Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083550Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:588:2205] 2025-09-25T16:18:34.083554Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083558Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:589:2206] 2025-09-25T16:18:34.083575Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing 
event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083578Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083585Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:591:2208] 2025-09-25T16:18:34.083587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083590Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083596Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:585:2202] 2025-09-25T16:18:34.083598Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083601Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083608Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:586:2203] 2025-09-25T16:18:34.083610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083613Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083618Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:587:2204] 2025-09-25T16:18:34.083620Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083622Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 2025-09-25T16:18:34.083628Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:590:2207] 2025-09-25T16:18:34.083630Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.083633Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #3 ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-09-25T16:18:34.094550Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:34.094623Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800023000 Epoch { Id: 2 Version: 4 Start: 3600023000 End: 7200023000 NextEnd: 10800023000 } } 2025-09-25T16:18:34.094651Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:34.094659Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-09-25T16:18:34.094667Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v4 host1:1001 to epoch cache 2025-09-25T16:18:34.094695Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-09-25T16:18:34.094714Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-09-25T16:18:34.094724Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z, approximate epoch start #3.5 2025-09-25T16:18:34.094741Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2025-09-25T16:18:34.094749Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z, approximate epoch start #3.5 nodes=1 expired=0 2025-09-25T16:18:34.094761Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z nodes=1 expired=0 removed=0 2025-09-25T16:18:34.094765Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-09-25T16:18:34.094779Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094789Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094797Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094824Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094832Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094841Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094848Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.094855Z node 1 :NODE_BROKER TRACE: 
node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.136292Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:693:2255], Recipient [1:556:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.136355Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2214], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.136362Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.136377Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.136472Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:695:2257], Recipient [1:556:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.136491Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:623:2214], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.136496Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.136503Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:34.136583Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:697:2259], Recipient [1:556:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.136610Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:623:2214], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:34.136616Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:34.136657Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 10800023000 Name: "slot-0" } } >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> TNodeBrokerTest::SubscribeToNodes >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TTxAllocatorClientTest::Boot >> TTxAllocatorClientTest::AllocateOverTheEdge >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> 
test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD] >> YdbLogStore::LogStore [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::NodesMigration1000Nodes >> TNodeBrokerTest::TestListNodesEpochDeltas >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 >> TTxAllocatorClientTest::ZeroRange >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] >> YdbTableBulkUpsert::RetryOperation >> YdbLogStore::LogStoreNegative >> THiveTest::TestDeleteTabletWithFollowers >> THiveTest::TestHiveBalancerDifferentResources2 >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] >> THiveTest::TestCreateSubHiveCreateManyTablets >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> YdbLogStore::LogStoreNegative [GOOD] >> YdbTableBulkUpsert::RetryOperation [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> YdbLogStore::Dirs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-09-25T16:18:32.809403Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.812313Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.812379Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.812402Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.812443Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.816603Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.816664Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.816724Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.816777Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.816904Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.816983Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.817019Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.820640Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820741Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820784Z node 4 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820839Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820896Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820926Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820941Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.820963Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821025Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821099Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821116Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821178Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821194Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821456Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821485Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821513Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821531Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821548Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821564Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821591Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821605Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821638Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821664Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821698Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821719Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.821796Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.821859Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.822053Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.822281Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.824419Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.824489Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.830388Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.830477Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.831199Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832478Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832578Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832622Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832747Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832870Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.832916Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.834177Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.834392Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.834509Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.834850Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.856959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.856986Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-09-25T16:18:32.864810Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.865517Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.865618Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.865901Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.866891Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.867081Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.867152Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:32.867170Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.867175Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.867197Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.867241Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.867247Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.867252Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.867258Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.867280Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.867286Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.910987Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.911039Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:32.911051Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:32.911064Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:32.911350Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:587:2206], Rec ... 
reams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:725:2272] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-09-25T16:18:34.534303Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:725:2272] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:34.534374Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:732:2273], recipient# [1:724:2188], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:34.534396Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:34.534420Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-09-25T16:18:34.534440Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:724:2188], Recipient [1:556:2188]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:34.534446Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:34.534466Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:34.534471Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-09-25T16:18:34.534506Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] 
Adding node #1024.v6 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:34.534574Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1024.v6 host2:1001 2025-09-25T16:18:34.534585Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 5 to 6 2025-09-25T16:18:34.534590Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=6 2025-09-25T16:18:34.534726Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:734:2275], Recipient [1:556:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.534756Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:556:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.534762Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.534775Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:34.534865Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:736:2277], Recipient [1:556:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.534891Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:625:2218], Recipient [1:556:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:34.534897Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:34.534912Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-09-25T16:18:34.535552Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-09-25T16:18:34.535571Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:530: [Committed] Remove node #1024.v4 host1:1001 2025-09-25T16:18:34.535590Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z, approximate epoch start #4.5 2025-09-25T16:18:34.535608Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:34.535617Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z, approximate epoch start #4.5 nodes=0 expired=0 2025-09-25T16:18:34.535628Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z nodes=0 expired=0 removed=1 2025-09-25T16:18:34.535634Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-09-25T16:18:34.535647Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535655Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535663Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535671Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535678Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535685Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535693Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.535704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.546784Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:34.546809Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1024.v6 host2:1001 2025-09-25T16:18:34.546832Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 5 to 6 2025-09-25T16:18:34.546837Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v6 host2:1001 to epoch cache 2025-09-25T16:18:34.546863Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v6 to update nodes log 2025-09-25T16:18:34.546907Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: 
TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000026000 Name: "slot-0" } 2025-09-25T16:18:34.547007Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:743:2284], Recipient [1:556:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.547031Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:556:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.547037Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.547050Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.547121Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:745:2286], Recipient [1:556:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.547130Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:556:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.547132Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.547136Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:34.547179Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:747:2288], Recipient [1:556:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.547193Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:625:2218], Recipient [1:556:2188]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:34.547196Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:34.547213Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000026000 Name: "slot-0" } } |81.4%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |81.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[blocks-div_uint64_opt2--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-09-25T16:18:32.655309Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.658201Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.658269Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode 
{ NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.658297Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.658341Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.662502Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.662557Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.662619Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.662670Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.662775Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.662856Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.662890Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.667341Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667477Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667550Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667617Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667697Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667748Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667777Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667819Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.667917Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668022Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668059Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668171Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668202Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668544Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668565Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668591Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668615Z node 5 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668637Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668655Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668683Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668693Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668734Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668764Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668795Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668843Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.668928Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:32.668978Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.669205Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.669489Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.672472Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.672553Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.678014Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.678877Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.679205Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680356Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680443Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680572Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680629Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680695Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.680739Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.681540Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.681593Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.681863Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.682010Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.682158Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.682535Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.702323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.702351Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.709168Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.709668Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.709751Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.710005Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.710939Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.711171Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.711267Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:32.711287Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.711293Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.711315Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.711376Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.711384Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.711390Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.711396Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:32.711419Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.711426Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.756076Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.756126Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:32.756138Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:32.756151Z ... nodes request for epoch #3 2025-09-25T16:18:34.734072Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:34.734135Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800026000 Epoch { Id: 2 Version: 4 Start: 3600026000 End: 7200026000 NextEnd: 10800026000 } } 2025-09-25T16:18:34.734154Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v4 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:34.734161Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 3 to 4 2025-09-25T16:18:34.734170Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v4 host1:1001 to epoch cache 2025-09-25T16:18:34.734193Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log ... 
waiting for epoch update 2025-09-25T16:18:34.734332Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:836:2337], Recipient [1:755:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.734346Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:755:2274]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.734350Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.734358Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:34.956214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435072, Sender [1:755:2274], Recipient [1:755:2274]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-09-25T16:18:34.956236Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:259: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-09-25T16:18:34.956257Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:20: TTxUpdateEpoch Execute 2025-09-25T16:18:34.956267Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Dirty] Move to new epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z, approximate epoch start #3.5 2025-09-25T16:18:34.956272Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:34.956298Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #3.5 2025-09-25T16:18:35.068313Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068356Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068373Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068397Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068412Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068426Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068443Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068491Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:35.068606Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:755:2274]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2025-09-25T16:18:35.068616Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068632Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.068773Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient 
[1:808:2313] 2025-09-25T16:18:35.068776Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068782Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.068810Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:811:2316] 2025-09-25T16:18:35.068815Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068834Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.068877Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:812:2317] 2025-09-25T16:18:35.068882Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068888Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.068904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:809:2314] 2025-09-25T16:18:35.068908Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068914Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.068978Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:810:2315] 2025-09-25T16:18:35.068983Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.068989Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.069041Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:813:2318] 2025-09-25T16:18:35.069048Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.069054Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.069105Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:814:2319] 2025-09-25T16:18:35.069110Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.069116Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.141318Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-09-25T16:18:35.141349Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #3.5 1970-01-01T02:00:00.026000Z - 
1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z, approximate epoch start #3.5 2025-09-25T16:18:35.141371Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:35.141381Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z, approximate epoch start #3.5 nodes=1 expired=0 2025-09-25T16:18:35.141414Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z nodes=1 expired=0 removed=0 2025-09-25T16:18:35.141422Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-09-25T16:18:35.141440Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141454Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141463Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141472Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141480Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141488Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141496Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.141505Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.173004Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:859:2348], Recipient [1:755:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:35.173067Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:755:2274]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:35.173076Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.173093Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:35.173221Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:861:2350], Recipient [1:755:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:35.173240Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:755:2274]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:35.173244Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:35.173251Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-09-25T16:18:08.019211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.019239Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.038813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:72:2112] sender: [1:178:2067] recipient: [1:49:2096] Leader for TabletID 72057594046678944 is [1:72:2112] sender: [1:181:2067] recipient: [1:180:2175] Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:183:2067] recipient: [1:180:2175] 2025-09-25T16:18:08.049074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.049100Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:215:2067] recipient: [1:24:2071] 2025-09-25T16:18:08.092126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.093883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: Coordinator 
ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: Mediator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:242:2218] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:242:2218] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:244:2220] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:244:2220] Leader for TabletID 72075186233409546 is [1:254:2224] sender: [1:256:2067] recipient: [1:242:2218] Leader for TabletID 72075186233409547 is [1:257:2226] sender: [1:258:2067] recipient: [1:244:2220] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.099652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:254:2224] sender: [1:290:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:257:2226] sender: [1:291:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.116712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:338:2290] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:338:2290] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:345:2294] sender: [1:346:2067] recipient: [1:338:2290] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:18:08.217437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:1234) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: SchemeShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2338] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2338] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:423:2342] sender: [1:425:2067] recipient: [1:416:2338] 2025-09-25T16:18:08.221948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.221974Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 2025-09-25T16:18:08.226849Z node 1 :TX_DATASHARD ERROR: datashard.cpp:3586: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186233409548 Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:486:2067] recipient: [1:49:2096] Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:488:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:182:2176] sender: [1:490:2067] recipient: [1:489:2388] Leader for TabletID 72057594046678944 is [1:491:2389] sender: [1:492:2067] recipient: [1:489:2388] 2025-09-25T16:18:08.233919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.233952Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded Leader for TabletID 72057594046678944 is [1:491:2389] sender: [1:523:2067] recipient: [1:24:2071] 2025-09-25T16:18:08.485582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.485615Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.502332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:178:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:72:2112] sender: [2:181:2067] recipient: [2:180:2175] Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:183:2067] recipient: 
[2:180:2175] 2025-09-25T16:18:08.513477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.513513Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:182:2176] sender: [2:215:2067] recipient: [2:24:2071] 2025-09-25T16:18:08.556230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.557563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: Coordinator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: Mediator ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:251:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:252:2067] recipient: [2:244:2220] Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:256:2067] recipient: [2:242:2218] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:259:2067] recipient: [2:244:2220] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.561553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:254:2224] sender: [2:290:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:257:2226] sender: [2:291:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 
103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-09-25T16:18:08.574641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:338:2290] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:343:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:345:2294] sender: [2:346:2067] recipient: [2:338:2290] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:18:08.641761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:1234) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: SchemeShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:416:2338] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2342] sender: [2:425:2067] recipient: [2:416:2338] 2025-09-25T16:18:08.648126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.648159Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-09-25T16:18:08.653059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:18:08.653084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5780: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:18:08.653185Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait 
ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.653213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-09-25T16:18:08.665443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-09-25T16:18:08.665525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6267: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-09-25T16:18:08.701891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:552:2446] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:552:2446] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:558:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:558:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:559:2450] sender: [2:560:2067] recipient: [2:552:2446] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-09-25T16:18:10.951887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7721: Cannot get console configs 2025-09-25T16:18:10.951924Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:11.004098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7721: Cannot get console configs 2025-09-25T16:18:11.004130Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2025-09-25T16:18:33.121507Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.124644Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.124716Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.124746Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.124794Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.128263Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.128303Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.128347Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.128381Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.128451Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.128497Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.128518Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.131723Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131797Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131831Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131871Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131915Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131942Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131955Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.131975Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132045Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132142Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132168Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132234Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132260Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132546Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132564Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132588Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132608Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132629Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132647Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132670Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132679Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132700Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132717Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132741Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132757Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.132818Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.132895Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133034Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133338Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133518Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.135850Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143274Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143303Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143884Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143935Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144776Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144810Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: 
Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144936Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144991Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145051Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145935Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145997Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.147223Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.147462Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.149053Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.149452Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.166965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:33.166983Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:33.171888Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:33.172435Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:33.172526Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:33.172766Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:33.173600Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:33.173776Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:33.173850Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:33.173867Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:33.173872Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:33.173891Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:33.173927Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:33.173933Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:33.173938Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:33.173945Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:33.173963Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:33.173969Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:33.217347Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:33.217391Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:33.217418Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:33.217431Z ... 
broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1002:2505], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467123Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 27 } 2025-09-25T16:18:34.467128Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467137Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 27, version: 10, server pipe id: [1:1002:2505] 2025-09-25T16:18:34.467145Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467212Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1002:2505], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467218Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 27, server pipe id: [1:1002:2505] 2025-09-25T16:18:34.467245Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1004:2507], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467256Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 28 } 2025-09-25T16:18:34.467259Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467262Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 28, version: 9, server pipe id: [1:1004:2507] 2025-09-25T16:18:34.467265Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467309Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1004:2507], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467314Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 28, server pipe id: [1:1004:2507] 2025-09-25T16:18:34.467336Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1006:2509], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467349Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 8 SeqNo: 29 } 2025-09-25T16:18:34.467352Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 29, version: 8, server pipe id: [1:1006:2509] 2025-09-25T16:18:34.467358Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v8 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467395Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1006:2509], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467398Z node 
1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 29, server pipe id: [1:1006:2509] 2025-09-25T16:18:34.467419Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1008:2511], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467435Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 7 SeqNo: 30 } 2025-09-25T16:18:34.467441Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467445Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 30, version: 7, server pipe id: [1:1008:2511] 2025-09-25T16:18:34.467449Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v7 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467506Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1008:2511], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467511Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 30, server pipe id: [1:1008:2511] 2025-09-25T16:18:34.467576Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1010:2513], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467593Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 31 } 2025-09-25T16:18:34.467597Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467602Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 31, version: 6, server pipe id: [1:1010:2513] 2025-09-25T16:18:34.467605Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467650Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1010:2513], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467653Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 31, server pipe id: [1:1010:2513] 2025-09-25T16:18:34.467678Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1012:2515], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467690Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 5 SeqNo: 32 } 2025-09-25T16:18:34.467695Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467699Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 32, version: 5, server pipe id: [1:1012:2515] 2025-09-25T16:18:34.467704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v5 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467766Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1012:2515], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467771Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 32, server pipe id: [1:1012:2515] 2025-09-25T16:18:34.467793Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1014:2517], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467811Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 4 SeqNo: 33 } 2025-09-25T16:18:34.467814Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467818Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 33, version: 4, server pipe id: [1:1014:2517] 2025-09-25T16:18:34.467821Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v4 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467863Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1014:2517], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467866Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 33, server pipe id: [1:1014:2517] 2025-09-25T16:18:34.467880Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1016:2519], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.467893Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 3 SeqNo: 34 } 2025-09-25T16:18:34.467896Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.467898Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 34, version: 3, server pipe id: [1:1016:2519] 2025-09-25T16:18:34.467901Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v3 -> v10 to [1:627:2218] 2025-09-25T16:18:34.467940Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1016:2519], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.467943Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 34, server pipe id: [1:1016:2519] 2025-09-25T16:18:34.468031Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1018:2521], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.468051Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 2 SeqNo: 35 } 2025-09-25T16:18:34.468056Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.468061Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 35, version: 2, server pipe id: [1:1018:2521] 
2025-09-25T16:18:34.468066Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v10 to [1:627:2218] 2025-09-25T16:18:34.468115Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:1018:2521], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.468120Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2218], seqNo: 35, server pipe id: [1:1018:2521] 2025-09-25T16:18:34.468145Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:1020:2523], Recipient [1:929:2446]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.468160Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:929:2446]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 1 SeqNo: 36 } 2025-09-25T16:18:34.468163Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.468165Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 36, version: 1, server pipe id: [1:1020:2523] 2025-09-25T16:18:34.468168Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v1 -> v10 to [1:627:2218] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-09-25T16:18:37.008418Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-09-25T16:18:37.008501Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-09-25T16:18:37.008616Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
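
The TNodeBrokerTest trace above shows the same client re-subscribing with ever older cached versions (seqNo 30 through 36, CachedVersion 7 down to 1); each time the broker answers with a single TEvUpdateNodes delta up to its current version (v10) and drops the subscription as soon as the server pipe disconnects. The standalone C++ sketch below models only that version-delta bookkeeping; the type and method names are illustrative and are not the real NKikimr::NNodeBroker classes.

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>

// Sketch of the subscribe/update/unsubscribe pattern visible in the log:
// each subscription carries the subscriber's cached version, the broker
// answers with one delta update up to its current version, and the
// subscription is dropped when the server pipe disconnects.
struct Subscriber {
    uint64_t SeqNo;
    uint64_t Version;   // last version the subscriber is known to have
};

class NodeBrokerSketch {
public:
    explicit NodeBrokerSketch(uint64_t currentVersion)
        : CurrentVersion(currentVersion) {}

    // Returns the [from, to] range that a single update message would cover.
    std::pair<uint64_t, uint64_t> Subscribe(const std::string& pipeId,
                                            uint64_t seqNo,
                                            uint64_t cachedVersion) {
        Subscribers[pipeId] = Subscriber{seqNo, cachedVersion};
        // "Send TEvUpdateNodes vN -> vCurrent": one delta brings the
        // subscriber up to date, even if its cache is many versions behind.
        return {cachedVersion, CurrentVersion};
    }

    void Disconnect(const std::string& pipeId) {
        // "Unsubscribed ..., server pipe id: ...": pipe loss ends the
        // subscription; a new pipe must subscribe again with a fresh SeqNo.
        Subscribers.erase(pipeId);
    }

private:
    uint64_t CurrentVersion;
    std::map<std::string, Subscriber> Subscribers;
};

int main() {
    NodeBrokerSketch broker(/*currentVersion=*/10);
    for (uint64_t cached = 7, seqNo = 30; cached >= 1; --cached, ++seqNo) {
        std::string pipe = "pipe-" + std::to_string(seqNo);
        auto [from, to] = broker.Subscribe(pipe, seqNo, cached);
        std::cout << "Send update v" << from << " -> v" << to << "\n";
        broker.Disconnect(pipe);
    }
    return 0;
}
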
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-09-25T16:18:37.008973Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.009062Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-09-25T16:18:37.011555Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.011585Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.011604Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-09-25T16:18:37.011632Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.011644Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.011667Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-09-25T16:18:37.011693Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-09-25T16:18:36.928819Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-09-25T16:18:36.928931Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-09-25T16:18:36.929068Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-09-25T16:18:36.929431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.929533Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-09-25T16:18:36.931686Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.931716Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.931732Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-09-25T16:18:36.931754Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.931761Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.931785Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-09-25T16:18:36.931811Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-09-25T16:18:36.931944Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-09-25T16:18:36.932071Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932078Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932089Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-09-25T16:18:36.932093Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 0 to# 5000 2025-09-25T16:18:36.932117Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932135Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932146Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932158Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932172Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-09-25T16:18:36.932228Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932238Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932249Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-09-25T16:18:36.932253Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 5000 to# 10000 2025-09-25T16:18:36.932272Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932290Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932311Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932343Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-09-25T16:18:36.932359Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-09-25T16:18:36.932398Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932408Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:36.932419Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-09-25T16:18:36.932424Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 10000 to# 15000 2025-09-25T16:18:36.932445Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
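
The TTxAllocatorClientTest::AllocateOverTheEdge output above shows the allocator tablet handing out txId ranges of RequestPerAllocator = 5000 (0-5000, 5000-10000, 10000-15000) while the client logs a non-fatal warning whenever a single request reaches BatchAllocationWarning = 500 (the log warns even for exactly 500). The following self-contained C++ sketch mirrors that behaviour with the constants taken from the log; it is a model of the pattern, not the real TX_ALLOCATOR_CLIENT code.

#include <cstdint>
#include <iostream>
#include <vector>

// Constants mirror the values printed in the log.
constexpr uint64_t RequestPerAllocator    = 5000;
constexpr uint64_t BatchAllocationWarning = 500;

class TxAllocatorClientSketch {
public:
    std::vector<uint64_t> AllocateTxIds(uint64_t count) {
        if (count >= BatchAllocationWarning) {
            // The log warns already for a request of exactly 500.
            std::cout << "WARN: requested many txIds. Just a warning, request is processed."
                      << " Requested: " << count << "\n";
        }
        std::vector<uint64_t> result;
        result.reserve(count);
        while (count > 0) {
            if (NextFree == RangeEnd) {
                ReserveRange();  // models the TEvAllocate / TEvAllocateResult round trip
            }
            result.push_back(NextFree++);
            --count;
        }
        return result;
    }

private:
    void ReserveRange() {
        // Models "TTxReserve Complete ... Reserved from# X Reserved to# X+5000".
        NextFree = RangeEnd;
        RangeEnd += RequestPerAllocator;
        std::cout << "Reserved range [" << NextFree << ", " << RangeEnd << ")\n";
    }

    uint64_t NextFree = 0;
    uint64_t RangeEnd = 0;
};

int main() {
    TxAllocatorClientSketch client;
    client.AllocateTxIds(1000);   // warns, served from the first reserved range
    client.AllocateTxIds(500);    // at the threshold: the log warns for 500 as well
    client.AllocateTxIds(3000);   // warns again
    client.AllocateTxIds(2500);   // crosses into the next reserved range
    return 0;
}
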
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] Test command err: 2025-09-25T16:18:33.130774Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133429Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133497Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133532Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.133576Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.137556Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.137623Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.137717Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.137775Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.137897Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.137984Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.138030Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.142377Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142480Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142548Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142608Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142691Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142735Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142758Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142798Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.142912Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143006Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143038Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143132Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143159Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143545Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143573Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143602Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143624Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143650Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143672Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.143707Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143721Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143758Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143789Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143830Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143860Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.143955Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:33.144025Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144309Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.144517Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145478Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145516Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145549Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.145576Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 
3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.152492Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.153331Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.153383Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.153567Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.153585Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154008Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154554Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154600Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154753Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154829Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.154907Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.155004Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.155028Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.156791Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.156985Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:33.176547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:33.176569Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-09-25T16:18:33.182965Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:33.183558Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:33.183656Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:33.183946Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:33.184559Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:33.184599Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:33.184662Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:33.184685Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:33.184689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:33.184703Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:33.184717Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:33.184721Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:33.184724Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:33.184728Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:33.184745Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:33.184751Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:33.218427Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:33.218475Z node 1 :NODE_BROKER TRACE: node_brok ... 
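
The NodeBroker startup log above prints epochs as triples such as "#1.1 1970-01-01T00:00:00.023000Z - 01:00 - 02:00" and, after several rotations, "#5.6 04:00 - 05:00 - 06:00": an id of the form #<epoch>.<version> together with the epoch start, its end, and the end of the following epoch, one hour apart in these tests. The sketch below models just that arithmetic; it reaches #5.5 because it only bumps the version on epoch changes, whereas in the log the version is presumably also bumped by node state changes, hence #5.6. It is not the NKikimr NodeBroker code.

#include <chrono>
#include <cstdint>
#include <iostream>

using Clock = std::chrono::system_clock;

// An epoch record as printed by the log: id, data version, and three
// timestamps (start, end, end of the next epoch).
struct EpochSketch {
    uint64_t Id;        // the "#N" part
    uint64_t Version;   // the ".M" part
    Clock::time_point Start;
    Clock::time_point End;
    Clock::time_point NextEnd;
};

EpochSketch NextEpoch(const EpochSketch& e, std::chrono::hours period) {
    // Moving to the next epoch shifts the window by one period and bumps
    // both the epoch id and the data version.
    return EpochSketch{e.Id + 1, e.Version + 1, e.End, e.NextEnd, e.NextEnd + period};
}

int main() {
    using namespace std::chrono;
    const auto base = Clock::time_point{} + milliseconds(23);  // the ...00.023000Z offset
    EpochSketch e{1, 1, base, base + hours(1), base + hours(2)};
    for (int i = 0; i < 4; ++i) {
        e = NextEpoch(e, hours(1));
    }
    // Prints "epoch #5.5"; the test log reports #5.6 because node
    // registrations bump the version as well.
    std::cout << "epoch #" << e.Id << "." << e.Version << "\n";
    return 0;
}
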
8:34.726025Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:791:2317], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726101Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:793:2319] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726113Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:795:2321] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726120Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:796:2322] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726128Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:790:2316] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726135Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:791:2317] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726142Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:792:2318] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:792:2318], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726166Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:794:2320] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.726174Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:793:2319], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726197Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:794:2320], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:795:2321], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726259Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:796:2322], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726283Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-09-25T16:18:34.726288Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726298Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726333Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:791:2317] 2025-09-25T16:18:34.726336Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726340Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726351Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:792:2318] 2025-09-25T16:18:34.726353Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726359Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726376Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:790:2316] 2025-09-25T16:18:34.726378Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726382Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726403Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:793:2319] 2025-09-25T16:18:34.726406Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726418Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726427Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:795:2321] 2025-09-25T16:18:34.726431Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726436Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726460Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:796:2322] 2025-09-25T16:18:34.726464Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726468Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726483Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:794:2320] 2025-09-25T16:18:34.726486Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726489Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726572Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:801:2327], Recipient 
[1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726582Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.726584Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726588Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726633Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:803:2329], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726645Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.726647Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726652Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726694Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:805:2331], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726704Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.726707Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726710Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726755Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:807:2333], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726770Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:34.726772Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.726776Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #5.6 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z - 1970-01-01T06:00:00.023000Z 2025-09-25T16:18:34.726828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:809:2335], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726845Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 6 SeqNo: 2 } 2025-09-25T16:18:34.726849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:34.726854Z node 1 :NODE_BROKER DEBUG: 
node_broker.cpp:747: New subscriber [1:625:2214], seqNo: 2, version: 6, server pipe id: [1:809:2335] 2025-09-25T16:18:34.726860Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v6 -> v6 to [1:625:2214] 2025-09-25T16:18:34.726905Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:809:2335], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:34.726909Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:625:2214], seqNo: 2, server pipe id: [1:809:2335] 2025-09-25T16:18:34.726926Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:811:2337], Recipient [1:750:2290]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.726948Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:625:2214], Recipient [1:750:2290]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-09-25T16:18:34.726954Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-09-25T16:18:34.726969Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-09-25T16:18:37.133235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:37.133263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:37.133270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:37.133276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:37.133282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:37.133287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:37.133296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:37.133311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:37.133430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:37.133489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:37.136651Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
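
The TNodeBrokerTest::NodesMigrationExpiredChanged output above ends with a no-op update (TEvUpdateNodes v6 -> v6 for a client that is already current) followed by TEvResolveNode { NodeId: 1024 } answered with WRONG_REQUEST / "Unknown node", presumably because the node's registration has expired and it is no longer in the broker's state. The short C++ sketch below illustrates that lookup-and-fail path; the types are standalone stand-ins, not the real event classes.

#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <string>

struct NodeInfoSketch {
    std::string Host;
    uint16_t Port;
};

struct ResolveResult {
    bool Ok;
    std::string Reason;                    // filled when Ok == false
    std::optional<NodeInfoSketch> Node;
};

class BrokerStateSketch {
public:
    void AddNode(uint32_t id, NodeInfoSketch info) { Nodes[id] = std::move(info); }
    void ExpireNode(uint32_t id) { Nodes.erase(id); }

    ResolveResult Resolve(uint32_t id) const {
        auto it = Nodes.find(id);
        if (it == Nodes.end()) {
            return {false, "Unknown node", std::nullopt};  // WRONG_REQUEST in the log
        }
        return {true, "", it->second};
    }

private:
    std::map<uint32_t, NodeInfoSketch> Nodes;
};

int main() {
    BrokerStateSketch state;
    state.AddNode(1024, {"host1", 19001});
    state.ExpireNode(1024);                 // epochs passed, lease not extended
    auto res = state.Resolve(1024);
    std::cout << (res.Ok ? "OK" : "WRONG_REQUEST: " + res.Reason) << "\n";
    return 0;
}
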
schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:37.136672Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:37.139506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:37.139632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:37.139651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2025-09-25T16:18:37.141733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:37.141852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:37.141957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2025-09-25T16:18:37.142107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-09-25T16:18:37.142944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2025-09-25T16:18:37.142981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:37.143265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-09-25T16:18:37.143275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-09-25T16:18:37.143297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:37.143305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-09-25T16:18:37.143315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:37.143381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.182566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2025-09-25T16:18:37.182655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.182713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2025-09-25T16:18:37.182720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046578944, LocalPathId: 1] source path: 2025-09-25T16:18:37.182756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2025-09-25T16:18:37.182766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:37.183520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2025-09-25T16:18:37.183564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-09-25T16:18:37.183608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.183615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046578944 2025-09-25T16:18:37.183620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:37.183624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:37.183920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.183928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046578944 2025-09-25T16:18:37.183932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:37.184175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.184181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2025-09-25T16:18:37.184188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046578944 2025-09-25T16:18:37.184193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:37.184735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:37.185206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:37.185251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-09-25T16:18:37.185502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2025-09-25T16:18:37.185512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-09-25T16:18:37.185517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2025-09-25T16:18:37.420717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2025-09-25T16:18:37.420782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2025-09-25T16:18:37.420793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-09-25T16:18:37.420914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:37.420926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046578944 2025-09-25T16:18:37.420970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2025-09-25T16:18:37.421013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2025-09-25T16:18:37.421743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-09-25T16:18:37.421761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2025-09-25T16:18:37.421818Z node 1 :FLAT_TX_SCHEMESHARD INFO: s ... 
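
The TTenantPoolTests log above walks an ESchemeOpAlterSubDomain sub-operation through the schemeshard state machine: "Change state for txid 1:0 2 -> 3" after TCreateParts finds no shards to create, "3 -> 128" after TConfigureParts, a propose message to coordinator 72057594046316545, and finally "128 -> 240" once TEvOperationPlan delivers the planned step. The sketch below reproduces that progression with the numeric states copied from the log; the state names are descriptive guesses and the code is not the schemeshard implementation.

#include <cstdint>
#include <iostream>

// Numeric values are the ones printed by the log; names are guesses.
enum class SubOpState : uint32_t {
    CreateParts    = 2,    // create shards (skipped here: "no shards to create")
    ConfigureParts = 3,    // configure the created parts
    Propose        = 128,  // wait for the coordinator to plan the step
    Done           = 240,  // operation part finished
};

SubOpState Progress(SubOpState s, bool planStepArrived) {
    switch (s) {
        case SubOpState::CreateParts:    return SubOpState::ConfigureParts;
        case SubOpState::ConfigureParts: return SubOpState::Propose;
        case SubOpState::Propose:
            return planStepArrived ? SubOpState::Done : SubOpState::Propose;
        case SubOpState::Done:           return SubOpState::Done;
    }
    return s;  // unreachable, keeps compilers happy
}

int main() {
    SubOpState s = SubOpState::CreateParts;
    s = Progress(s, false);              // 2 -> 3
    s = Progress(s, false);              // 3 -> 128, propose sent to the coordinator
    s = Progress(s, false);              // still 128: waiting for TEvOperationPlan
    s = Progress(s, true);               // 128 -> 240 once the plan step arrives
    std::cout << static_cast<uint32_t>(s) << "\n";  // prints 240
    return 0;
}
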
{ Items { Kind: 10 Id: 4 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } } } } 2025-09-25T16:18:37.635998Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:222: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-09-25T16:18:37.636034Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.636054Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:406:2363]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.636062Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.636071Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:409:2361]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.636573Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } 2025-09-25T16:18:37.636585Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:452: TDomainTenantPool(dc-1) static slot label modified from static to static-again 2025-09-25T16:18:37.636590Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:406:2363] 2025-09-25T16:18:37.636626Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:406:2363], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.636633Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.636668Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:409:2361], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.636673Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.658365Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273285146, Sender [1:411:2362], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 5 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 
Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } } } } 2025-09-25T16:18:37.658392Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:222: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-09-25T16:18:37.658429Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.658447Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:406:2363]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.658455Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.658467Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:409:2361]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.659578Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } 2025-09-25T16:18:37.659609Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:406:2363], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.659615Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.659630Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:409:2361], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.659635Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.681423Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273285146, Sender [1:411:2362], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 6 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } } } } 2025-09-25T16:18:37.681447Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:222: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-09-25T16:18:37.681486Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.681504Z node 1 :CONFIGS_DISPATCHER TRACE: 
configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:406:2363]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.681511Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.681523Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:409:2361]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.682651Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2025-09-25T16:18:37.682683Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:406:2363], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.682690Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.682707Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:409:2361], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.682712Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.706027Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273285146, Sender [1:411:2362], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false EnableScaleRecommender: true } Version { Items { Kind: 10 Id: 7 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } Items { Kind: 10 Id: 7 Generation: 1 } } } } 2025-09-25T16:18:37.706053Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:222: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-09-25T16:18:37.706089Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.706103Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:406:2363]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.706109Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:1037: Sending for kinds: MonitoringConfigItem 2025-09-25T16:18:37.706122Z node 1 :CONFIGS_DISPATCHER 
TRACE: configs_dispatcher.cpp:362: Send TEvConsole::TEvConfigNotificationRequest to [1:409:2361]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2025-09-25T16:18:37.707215Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:406:2363], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.707225Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2025-09-25T16:18:37.707250Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:486: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2025-09-25T16:18:37.707267Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:216: StateWork, received event# 273286162, Sender [1:409:2361], Recipient [1:405:2362]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2025-09-25T16:18:37.707272Z node 1 :CONFIGS_DISPATCHER TRACE: configs_dispatcher.cpp:228: StateWork, processing event TEvConsole::TEvConfigNotificationResponse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2025-09-25T16:18:31.984506Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.987801Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.987893Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.987922Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.987962Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.991729Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.991791Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.991860Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.991906Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.992006Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.992075Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.992108Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.996756Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:31.996904Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.996967Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997030Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997107Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997169Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997204Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997243Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997360Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997488Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997522Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997624Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.997653Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998059Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998104Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998144Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998174Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998204Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998232Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998265Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998282Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998330Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998362Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998403Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998431Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998526Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:31.998611Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:31.998766Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.002067Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.007661Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008163Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008762Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008802Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008909Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.008970Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009017Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009075Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.009233Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:32.030539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:32.030570Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:32.037069Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:32.037759Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:32.037848Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:32.038099Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:32.039454Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:32.039501Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:32.039574Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:32.039591Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:32.039597Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:32.039616Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:32.039650Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:32.039658Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:32.039663Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:32.039669Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-09-25T16:18:32.039687Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:32.039694Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:32.072961Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:32.073011Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-09-25T16:18:32.073024Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:32.073037Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:32.073218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:579:2204], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.073344Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:580:2205], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.073384Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:581:2206], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:32.073457Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:585:2210] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } 2025-09-25T16:18:32.073476Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:579:2204] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } 2025-09-25T16:18:32.073489Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cp ... 
25-09-25T16:18:34.489493Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1548.v505 to update nodes log 2025-09-25T16:18:34.489500Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1547.v505 to update nodes log 2025-09-25T16:18:34.489508Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1546.v505 to update nodes log 2025-09-25T16:18:34.489516Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1545.v505 to update nodes log 2025-09-25T16:18:34.489524Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1544.v505 to update nodes log 2025-09-25T16:18:34.489533Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1543.v505 to update nodes log 2025-09-25T16:18:34.489541Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1542.v505 to update nodes log 2025-09-25T16:18:34.489547Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1541.v505 to update nodes log 2025-09-25T16:18:34.489554Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1540.v505 to update nodes log 2025-09-25T16:18:34.489562Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1536.v505 to update nodes log 2025-09-25T16:18:34.489570Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1787.v505 to update nodes log 2025-09-25T16:18:34.489883Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2821:3848], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.489937Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2821:3848] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.490094Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:2762:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:34.490104Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.490118Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.493834Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2822:3849], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.493909Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2827:3854] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.493930Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2828:3855] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.493945Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2823:3850], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.493953Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2823:3850] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.493967Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2822:3849] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.493979Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2824:3851] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.493992Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2825:3852] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.494005Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2826:3853] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:34.494056Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2824:3851], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.494094Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2825:3852], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.494110Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2826:3853], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.494208Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2827:3854], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.494221Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2828:3855], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.494255Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:2823:3850] 2025-09-25T16:18:34.494262Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.494278Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.494295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:2827:3854] 2025-09-25T16:18:34.494299Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.494305Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.494320Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:2828:3855] 2025-09-25T16:18:34.494324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.494331Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.500237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], 
Recipient [1:2822:3849] 2025-09-25T16:18:34.500268Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.500294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.506091Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:2824:3851] 2025-09-25T16:18:34.506127Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.506150Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.506214Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:2825:3852] 2025-09-25T16:18:34.506220Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.506229Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.511978Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:2826:3853] 2025-09-25T16:18:34.512011Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.512031Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.515141Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2836:3863], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.515203Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:617:2216], Recipient [1:2762:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.515213Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.515225Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.517688Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2838:3865], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.517740Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:617:2216], Recipient [1:2762:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.517748Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.517758Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.522157Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2840:3867], Recipient [1:2762:3803]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.522228Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:617:2216], Recipient [1:2762:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:34.522237Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.522253Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-09-25T16:18:34.524444Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2842:3869], Recipient [1:2762:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:34.524507Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:617:2216], Recipient [1:2762:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 504 } 2025-09-25T16:18:34.524513Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:34.524526Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.505 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-09-25T16:18:37.017590Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:255:2106] Bootstrap 2025-09-25T16:18:37.017809Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:255:2106] Become StateWork (SchemeCache [3:264:2109]) 2025-09-25T16:18:37.017870Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:254:2106] Bootstrap 2025-09-25T16:18:37.018059Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:254:2106] Become StateWork (SchemeCache [2:267:2109]) 2025-09-25T16:18:37.018106Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:253:2157] Bootstrap 2025-09-25T16:18:37.018267Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:253:2157] Become StateWork (SchemeCache [1:270:2163]) 2025-09-25T16:18:37.022903Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:37.026479Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:37.026569Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-09-25T16:18:37.026963Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:37.027264Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:18:37.027756Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2083} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:37.027767Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:820} Handle TEvInterconnect::TEvNodesInfo 2025-09-25T16:18:37.027867Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-09-25T16:18:37.031773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 
2025-09-25T16:18:37.031830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-09-25T16:18:37.031865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-09-25T16:18:37.031893Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:37.031909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-09-25T16:18:37.031951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:37.077712Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:253:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:37.078575Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-09-25T16:18:37.078603Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:254:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:37.078741Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:255:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:18:37.079445Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:18:37.079508Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-09-25T16:18:37.121100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-09-25T16:18:37.121143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:37.131804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-09-25T16:18:37.131843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:37.131856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-09-25T16:18:37.131871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:37.131898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-09-25T16:18:37.131908Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:37.131926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-09-25T16:18:37.131936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:37.142685Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-09-25T16:18:37.142733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:37.153748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-09-25T16:18:37.153813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-09-25T16:18:37.154036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-09-25T16:18:37.154042Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2214} LoadFinished 2025-09-25T16:18:37.155989Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-09-25T16:18:37.156006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-09-25T16:18:37.156515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-09-25T16:18:37.156541Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-09-25T16:18:37.156606Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 3 DeclarativePDiskManagement: true } 2025-09-25T16:18:37.156610Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 3 Update# {} Comprehensive# true 2025-09-25T16:18:37.156628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-09-25T16:18:37.156634Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:242} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-09-25T16:18:37.156871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:410} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } } } } 2025-09-25T16:18:37.156959Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat 2025-09-25T16:18:37.156967Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat 2025-09-25T16:18:37.156972Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat 2025-09-25T16:18:37.157214Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-09-25T16:18:37.157236Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:37.157251Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-09-25T16:18:37.157391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-09-25T16:18:37.157407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-09-25T16:18:37.157725Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-09-25T16:18:37.157781Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-09-25T16:18:37.168950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-09-25T16:18:37.169016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 3 Devices# [] 2025-09-25T16:18:37.169094Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-09-25T16:18:37.171815Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-09-25T16:18:37.172016Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2935} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-09-25T16:18:37.172203Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:300} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16755123493467873891 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-09-25T16:18:37.174616Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-09-25T16:18:37.174844Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2935} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-09-25T16:18:37.174899Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:300} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/endf/004768/r3tmp/tmp7EA6PB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 174059153247470993 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-09-25T16:18:37.175105Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:37.175165Z node 3 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:37.175172Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:37.175212Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:37.175218Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-09-25T16:18:37.175254Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:37.175266Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:37.177013Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2025-09-25T16:18:37.177073Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:37.177084Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:37.177099Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-2 with resources CPU: 1 Memory: 1 Network: 1 2025-09-25T16:18:37.177112Z node 3 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:37.177132Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-09-25T16:18:37.177142Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-09-25T16:18:37.177155Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:37.177254Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:37.177266Z 
node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:37.177271Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:37.177287Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:520:2322] 2025-09-25T16:18:37.179512Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-09-25T16:18:37.179528Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:504:2318] 2025-09-25T16:18:37.179766Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:520:2322]} 2025-09-25T16:18:37.179783Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:37.179791Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:37.179795Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:37.180026Z node 2 :LOCAL DEBUG: local.cpp:1256: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-09-25T16:18:37.180034Z node 2 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2025-09-25T16:18:37.180074Z node 3 :LOCAL DEBUG: local.cpp:1256: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2025-09-25T16:18:37.180079Z node 3 :LOCAL DEBUG: local.cpp:1115: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2025-09-25T16:18:37.187152Z node 2 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2025-09-25T16:18:37.187204Z node 2 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-09-25T16:18:37.187349Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:37.187356Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:37.187373Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:531:2119] 2025-09-25T16:18:37.187454Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2025-09-25T16:18:37.187461Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:505:2115] 2025-09-25T16:18:37.187603Z node 3 :LOCAL DEBUG: local.cpp:1283: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2025-09-25T16:18:37.187618Z node 3 :LOCAL DEBUG: local.cpp:1221: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 
Memory: 1 Network: 1) 2025-09-25T16:18:37.187699Z node 3 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:37.187705Z node 3 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:37.187713Z node 3 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[3:538:2119] 2025-09-25T16:18:37.187763Z node 3 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-2 2025-09-25T16:18:37.187768Z node 3 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [3:506:2115] 2025-09-25T16:18:37.188476Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:531:2119]} 2025-09-25T16:18:37.188488Z node 3 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[3:538:2119]} 2025-09-25T16:18:37.188552Z node 3 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:37.188563Z node 3 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:37.188567Z node 3 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:37.188598Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-09-25T16:18:37.188604Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-09-25T16:18:37.188608Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-09-25T16:18:37.203492Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DecimalPK [GOOD] Test command err: 2025-09-25T16:18:24.152548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061941606510947:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.152571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00418c/r3tmp/tmpcr7WRt/pdisk_1.dat 2025-09-25T16:18:24.201187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:24.212308Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30639, node 1 2025-09-25T16:18:24.225650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.225664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.225670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.225717Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2060 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.253757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.253786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.254333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:24.257033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.488402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:24.545929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) SUCCESS 2025-09-25T16:18:24.574873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061941606511983:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.574897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061941606511975:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.574910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.574971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061941606511990:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.574980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:24.575665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:24.579919Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061941606511989:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:24.662636Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061941606512066:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:24.768053Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpcnydqxa57dzs761haxb, Database: , SessionId: ydb://session/3?node_id=1&id=NzM1MWNlODQtZjIwM2NjMzktNjExOGU5NDItMzJlZmYzZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 0 rows 2025-09-25T16:18:24.831550Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tpcw52ppw0r4rc0yt29w9, Database: , SessionId: ydb://session/3?node_id=1&id=NzM1MWNlODQtZjIwM2NjMzktNjExOGU5NDItMzJlZmYzZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 1 rows 2025-09-25T16:18:24.844123Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-09-25T16:18:24.844944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) SUCCESS 2025-09-25T16:18:24.910774Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k60tpcytevg4gz3xmnpa0ex1, Database: , SessionId: ydb://session/3?node_id=1&id=NzdiMjRiMGQtZjg2ZGQ0ZS1kNTNmOWQzNC1mY2U0YjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 0 rows 2025-09-25T16:18:24.967840Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715666. Ctx: { TraceId: 01k60tpd0g3ed2rtpysff3fq30, Database: , SessionId: ydb://session/3?node_id=1&id=NzdiMjRiMGQtZjg2ZGQ0ZS1kNTNmOWQzNC1mY2U0YjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 1 rows 2025-09-25T16:18:24.979770Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-09-25T16:18:24.980504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) SUCCESS 2025-09-25T16:18:25.061954Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715669. Ctx: { TraceId: 01k60tpd363cnw603zqhwj2s4g, Database: , SessionId: ydb://session/3?node_id=1&id=NmY0ODAyODItNGI4YjE1N2ItYzg3NzhmZTktMzIzMzc4YzM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 0 rows 2025-09-25T16:18:25.129847Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715670. 
Ctx: { TraceId: 01k60tpd59b41hsat35752vyh2, Database: , SessionId: ydb://session/3?node_id=1&id=NmY0ODAyODItNGI4YjE1N2ItYzg3NzhmZTktMzIzMzc4YzM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 1 rows 2025-09-25T16:18:25.142929Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-09-25T16:18:25.144166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:25.154752Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; SUCCESS 2025-09-25T16:18:25.212724Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715673. Ctx: { TraceId: 01k60tpd89375byknypj2xytnx, Database: , SessionId: ydb://session/3?node_id=1&id=NzU1NzBjYmYtMjUzNjUzZjgtNDI1NDFmNzEtOTJjMjY2MWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 0 rows 2025-09-25T16:18:25.272724Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715674. Ctx: { TraceId: 01k60tpda13axyqnmtdft2g66k, Database: , SessionId: ydb://session/3?node_id=1&id=NzU1NzBjYmYtMjUzNjUzZjgtNDI1NDFmNzEtOTJjMjY2MWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 1 rows 2025-09-25T16:18:25.308394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/ ... : log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061980483974999:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:33.265337Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:33.269704Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00418c/r3tmp/tmpYa1eea/pdisk_1.dat 2025-09-25T16:18:33.289400Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63367, node 10 2025-09-25T16:18:33.315168Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:33.315201Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:33.315204Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:33.315273Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28540 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:33.366968Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:33.367021Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:33.368640Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:33.372499Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:33.553438Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:33.628019Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100002 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100002 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row key size of 1100000 bytes is larger than the allowed threshold 1049600
: Error: Bulk upsert to table '/Root/Limits' Row cell size of 17000022 bytes is larger than the allowed threshold 16777216 2025-09-25T16:18:34.766419Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061984797486654:2153];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:34.766469Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00418c/r3tmp/tmpo3JpGG/pdisk_1.dat 2025-09-25T16:18:34.770206Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:34.782847Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6753, node 13 2025-09-25T16:18:34.805333Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:34.805346Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:34.805348Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:34.805407Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:34.868170Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:34.868213Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:34.869809Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:34.870036Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:34.947624Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:35.172476Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:35.192117Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061989092454986:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:35.192118Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061989092454975:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:35.192140Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:35.192241Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7554061989092454990:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:35.192261Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:35.192908Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:35.197798Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7554061989092454989:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:35.258761Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7554061989092455066:2794] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:35.291197Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpq1qfm4wyw0enwrjdrxe, Database: , SessionId: ydb://session/3?node_id=13&id=ZWI0MmZiNGUtMjk4NGU2NDktNTE5Y2VmNGEtZThhY2Q4ZDk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingAfter >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] |81.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-09-25T16:18:36.691486Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.693859Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.693919Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.693943Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.693983Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.697187Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.697230Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.697274Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.697311Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.697388Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.697440Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.697465Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.701270Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
2025-09-25T16:18:36.701366Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701429Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701469Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701514Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701544Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701558Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701588Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701645Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701716Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701741Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701805Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.701823Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702082Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702102Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702124Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702137Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702151Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702164Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702186Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702204Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702229Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702255Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702284Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702309Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702387Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.702447Z node 2 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702667Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.702917Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.703683Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.703724Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.703745Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.703761Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.709849Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.710649Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.710673Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.710899Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.711365Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.711413Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.712054Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.712134Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.712385Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.712459Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.712516Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.713708Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.714078Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.727885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 
2025-09-25T16:18:36.727907Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:36.732251Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:36.732604Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:36.732656Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:36.732840Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:36.733215Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:36.733239Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:36.733281Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:36.733291Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:36.733296Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:36.733307Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:36.733317Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:36.733321Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:36.733324Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:36.733328Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:36.733338Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:36.733342Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:36.766304Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:36.766350Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2025-09-25T16:18:36.766364Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:36.766376Z ... 
node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host1:19001 (not fixed) tenant: /dc-1/yet-another-database 2025-09-25T16:18:37.038783Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:19001 to database state=Active resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:37.049566Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:37.049627Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200023000 Name: "slot-1" } 2025-09-25T16:18:37.049769Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:721:2294], Recipient [1:556:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:37.049812Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:679:2268], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-09-25T16:18:37.049819Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:37.049846Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-09-25T16:18:37.049898Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:37.049925Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:685:2273] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:37.049981Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:723:2295], recipient# [1:722:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { 
Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:37.049996Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:37.050009Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-09-25T16:18:37.050026Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:722:2186], Recipient [1:556:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:37.050032Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:37.050042Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:37.050047Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2025-09-25T16:18:37.050068Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1027.v5 host4:19001 to database state=Active resolvehost=host4 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:37.050126Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Dirty] Register new active node #1027.v5 host4:19001 2025-09-25T16:18:37.050135Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5 2025-09-25T16:18:37.050140Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=5 2025-09-25T16:18:37.071957Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:37.071984Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:264: [Committed] Register new active node #1027.v5 host4:19001 2025-09-25T16:18:37.071997Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-09-25T16:18:37.072003Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1027.v5 host4:19001 to epoch cache 2025-09-25T16:18:37.072025Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v5 to update nodes log 2025-09-25T16:18:37.072074Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } Expire: 7200023000 Name: "slot-0" } 2025-09-25T16:18:37.072234Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:728:2299], Recipient [1:556:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:37.072267Z node 1 :NODE_BROKER TRACE: 
node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:679:2268], Recipient [1:556:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-09-25T16:18:37.072275Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-09-25T16:18:37.072286Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-09-25T16:18:37.072344Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:18:37.072370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:685:2273] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-09-25T16:18:37.072423Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:730:2300], recipient# [1:729:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-09-25T16:18:37.072441Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1561: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:18:37.072457Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1587: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-09-25T16:18:37.072477Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:729:2186], Recipient [1:556:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:37.072483Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-09-25T16:18:37.072494Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-09-25T16:18:37.072500Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2025-09-25T16:18:37.072522Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v2 host1:19001 to database state=Active resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=2 authorizedbycertificate=false 2025-09-25T16:18:37.083320Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-09-25T16:18:37.083384Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200023000 Name: "slot-2" } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-09-25T16:18:25.747250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061947880622639:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:25.747334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00417e/r3tmp/tmpxxGCLU/pdisk_1.dat 2025-09-25T16:18:25.851482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:25.859278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:25.859305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:25.861731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:25.875974Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22400, node 1 2025-09-25T16:18:25.964252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:25.964269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:25.964271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:25.964333Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11598 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:26.039857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.054547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:26.418518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) SUCCESS 3 rows in 0.007166s 2025-09-25T16:18:26.551858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061952175592677:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.551887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.551890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061952175592688:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.551975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061952175592691:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.551988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:26.552756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:26.557368Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554061952175592692:2427], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:26.656383Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061952175592764:4096] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:26.749716Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:26.775008Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpekqb83c9dr2fp5zwry4, Database: , SessionId: ydb://session/3?node_id=1&id=MTM0MWQ2YWMtODFjYzU3YjAtY2E1OTI2ZWUtZTMxMjM0ZjY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root SUCCESS count returned 3 rows 2025-09-25T16:18:27.616104Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061955044079157:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:27.616183Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:27.635818Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00417e/r3tmp/tmpbZEctc/pdisk_1.dat 2025-09-25T16:18:27.652034Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27035, node 4 2025-09-25T16:18:27.673047Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:27.673063Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:27.673065Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:27.673108Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:27.721772Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:27.721804Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:27.727789Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:27.737618Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:27.890763Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:28.060465Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' Writing to index implementation tables is not allowed. 2025-09-25T16:18:28.867749Z node 7 :METADATA ... e) 2025-09-25T16:18:35.024401Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:35.070882Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:35.070918Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:35.072438Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:35.085696Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:35.173451Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:35.353334Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-09-25T16:18:36.494479Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7554061993787173071:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:36.494514Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:36.525760Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00417e/r3tmp/tmpoaAp4K/pdisk_1.dat 2025-09-25T16:18:36.541462Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10617, node 13 2025-09-25T16:18:36.567493Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:36.567504Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:36.567507Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:36.567558Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:36.596163Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:36.596204Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:36.597809Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:36.626421Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:36.633144Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-09-25T16:18:36.804194Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions
2025-09-25T16:18:36.922475Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684)
Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS
Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS
Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS
Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS
Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS
Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS
Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS
Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS
Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times
2025-09-25T16:18:37.496056Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup;
Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 >> THiveTest::TestHiveBalancerUselessNeighbourMoves |81.5%| [TA] {RESULT}
$(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-09-25T16:18:08.092872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.092898Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.111975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:18:08.115961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:18:08.116300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:18:08.123883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-09-25T16:18:08.505429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:08.505457Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TestModificationResults wait txId: 1 2025-09-25T16:18:08.523365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
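In the TCacheTestWithDrops output above, each "TestWaitNotification wait txId: N" is answered with a "NotifyTxCompletion, unknown transaction" warning and still finishes with "TestWaitNotification: OK", which is consistent with a completion subscription that arrives after the transaction has already finished. The following is a hedged sketch of such a notify-on-completion helper, with invented types that are not the actual schemeshard interface, only an illustration of why an unknown txId at subscription time can still be answered OK.

```cpp
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <utility>

// Hypothetical "notify me when txId completes" helper.  If the subscription
// arrives after the transaction was erased from the in-flight set, the txId is
// unknown (cf. the warning above) and the waiter is answered immediately.
class TTxNotifier {
public:
    using TCallback = std::function<void(uint64_t)>;

    void Begin(uint64_t txId) { InFlight[txId] = true; }

    void Subscribe(uint64_t txId, TCallback cb) {
        if (InFlight.count(txId) == 0) {
            std::cout << "WARN: NotifyTxCompletion, unknown transaction, txId: "
                      << txId << "\n";
            cb(txId);                       // treated as already completed
            return;
        }
        Waiters[txId] = std::move(cb);      // will be answered on Complete()
    }

    void Complete(uint64_t txId) {
        InFlight.erase(txId);
        if (auto it = Waiters.find(txId); it != Waiters.end()) {
            it->second(txId);
            Waiters.erase(it);
        }
    }

private:
    std::map<uint64_t, bool> InFlight;
    std::map<uint64_t, TCallback> Waiters;
};

int main() {
    TTxNotifier notifier;
    notifier.Begin(101);
    notifier.Complete(101);                 // tx finishes before anyone waits
    notifier.Subscribe(101, [](uint64_t id) {
        std::cout << "TestWaitNotification: OK eventTxId " << id << "\n";
    });
    return 0;
}
```

The point of the sketch is only that the warning is informational: a txId that is no longer tracked is treated as already done, so the waiting test proceeds.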
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> TTxAllocatorClientTest::InitiatingRequest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] >> TResourceBroker::TestResubmitTask >> BootstrapperTest::RestartUnavailableTablet >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> YdbYqlClient::RenameTables [GOOD] >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> TResourceBroker::TestAutoTaskId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-09-25T16:18:37.079854Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.082353Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.082412Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.082433Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.082469Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.085774Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.085820Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.085865Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.085901Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.085979Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.086032Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.086056Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.090666Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.090797Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.090868Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.090923Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.090983Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 
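The TNodeBrokerTest output that follows revolves around hourly epochs: each epoch carries an id, a monotonically growing version, and three timestamps (start, end, and the end of the next epoch); registered nodes hold leases that expire at an epoch boundary, and extending a lease bumps the epoch version and moves the node's expiry to the end of the next epoch. Here is a simplified sketch of that bookkeeping, with invented types that only mirror what the log prints and are not the real NodeBroker state machine.

```cpp
#include <cstdint>
#include <iostream>
#include <map>

// Simplified illustration of the epoch/lease bookkeeping visible in the
// NODE_BROKER lines; field names follow the log, everything else is assumed.
struct TEpoch {
    uint64_t Id = 1;
    uint64_t Version = 1;
    uint64_t Start = 0, End = 3600, NextEnd = 7200;   // seconds, hourly epochs
};

struct TNode {
    uint64_t Expire = 0;   // lease expires at an epoch boundary
    int      Lease  = 1;
};

struct TNodeBroker {
    TEpoch Epoch;
    std::map<uint32_t, TNode> Nodes;

    // "Extended lease of #<id> up to <time> (lease N)" plus a version bump.
    void ExtendLease(uint32_t nodeId) {
        auto& node = Nodes[nodeId];
        node.Expire = Epoch.NextEnd;
        ++node.Lease;
        ++Epoch.Version;
        std::cout << "Extended lease of #" << nodeId << " up to " << node.Expire
                  << " (lease " << node.Lease << "), epoch version "
                  << Epoch.Version << "\n";
    }

    // "Move to new epoch #<id+1>": shift the hour-wide window forward.
    void MoveToNextEpoch() {
        ++Epoch.Id;
        ++Epoch.Version;
        Epoch.Start = Epoch.End;
        Epoch.End = Epoch.NextEnd;
        Epoch.NextEnd += 3600;
    }
};

int main() {
    TNodeBroker broker;
    broker.Nodes[1024] = {broker.Epoch.End, 1};
    broker.MoveToNextEpoch();      // epoch #2
    broker.ExtendLease(1024);      // expiry now at the end of epoch #3
    return 0;
}
```

The sliding three-timestamp window is why every "Send TEvNodesInfo for epoch #N.V" line in the log shows three consecutive hour marks.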
2025-09-25T16:18:37.091019Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091047Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091078Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091156Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091234Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091253Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091323Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091343Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091678Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091738Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091778Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091801Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091825Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091848Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.091894Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.091906Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.091935Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.091955Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.091987Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.092006Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.092094Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.092148Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.092339Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.092549Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.093299Z node 3 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.093323Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.093349Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.093366Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.098859Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.098897Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.098908Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.099460Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.099500Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100039Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100123Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100288Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100398Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100447Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.100509Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.101802Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.101998Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.124561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:37.124587Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-09-25T16:18:37.130171Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:37.130653Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:37.130731Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:37.130981Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:37.131692Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:37.131922Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:37.132020Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:37.132040Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.132046Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:37.132067Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:37.132128Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:37.132135Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:37.132141Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:37.132148Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.132165Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:37.132171Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:37.175941Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:37.175992Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:37.176006Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:37.176019Z ... 
eceived event# 272039936, Sender [8:228:2072], Recipient [1:710:2259] 2025-09-25T16:18:38.024383Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024387Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.024447Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:713:2262] 2025-09-25T16:18:38.024452Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024457Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.024475Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:714:2263] 2025-09-25T16:18:38.024477Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024481Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.024550Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:716:2265] 2025-09-25T16:18:38.024553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024556Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.024576Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:711:2260] 2025-09-25T16:18:38.024580Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024584Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.024589Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:712:2261] 2025-09-25T16:18:38.024591Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.024594Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.228889Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-09-25T16:18:38.228920Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z, approximate epoch start #2.4 2025-09-25T16:18:38.228938Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:38.228948Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z, 
approximate epoch start #2.4 nodes=2 expired=0 2025-09-25T16:18:38.228983Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z nodes=2 expired=0 removed=0 2025-09-25T16:18:38.228990Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v2 to update nodes log 2025-09-25T16:18:38.228999Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v3 to update nodes log 2025-09-25T16:18:38.229012Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229023Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229030Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229037Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229044Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229051Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229059Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.229067Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.250159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:740:2279], Recipient [1:665:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.250223Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:665:2229]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.250231Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.250245Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.250321Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:742:2281], Recipient [1:665:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.250338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:665:2229]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.250343Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.250349Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.026000Z - 
1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z 2025-09-25T16:18:38.250408Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:744:2283], Recipient [1:665:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.250437Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039939, Sender [1:627:2218], Recipient [1:665:2229]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2025-09-25T16:18:38.250443Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-09-25T16:18:38.250464Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1024 2025-09-25T16:18:38.250482Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1024.v5 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:38.250501Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1024.v5 host1:1001 to database state=Active resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-09-25T16:18:38.250567Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 4 to 5 2025-09-25T16:18:38.250573Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=5 2025-09-25T16:18:38.261589Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:38.261658Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800026000 Epoch { Id: 2 Version: 5 Start: 3600026000 End: 7200026000 NextEnd: 10800026000 } } 2025-09-25T16:18:38.261683Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1024.v5 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:38.261689Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 4 to 5 2025-09-25T16:18:38.261695Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1024.v5 host1:1001 to epoch cache 2025-09-25T16:18:38.261719Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v5 to update nodes log 2025-09-25T16:18:38.261854Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:748:2287], Recipient [1:665:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.261886Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039939, Sender [1:627:2218], Recipient [1:665:2229]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2025-09-25T16:18:38.261892Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:253: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-09-25T16:18:38.261906Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:44: TTxExtendLease Execute node #1026 2025-09-25T16:18:38.261914Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Dirty] Extended lease of #1026.v6 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:38.261930Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:863: [DB] Adding node #1026.v6 host2:1001 to database state=Active resolvehost=host2.yandex.net address=1.2.3.5 dc=1 
location=DC=1/M=2/R=3/U=5/ lease=2 expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-09-25T16:18:38.261979Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Dirty] Update current epoch version from 5 to 6 2025-09-25T16:18:38.261984Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1363: [DB] Update epoch version in database version=6 2025-09-25T16:18:38.272911Z node 1 :NODE_BROKER DEBUG: node_broker__extend_lease.cpp:78: TTxExtendLease Complete 2025-09-25T16:18:38.272975Z node 1 :NODE_BROKER TRACE: node_broker__extend_lease.cpp:82: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1026 Expire: 10800026000 Epoch { Id: 2 Version: 6 Start: 3600026000 End: 7200026000 NextEnd: 10800026000 } } 2025-09-25T16:18:38.272996Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:314: [Committed] Extended lease of #1026.v6 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-09-25T16:18:38.273002Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:552: [Committed] Update current epoch version from 5 to 6 2025-09-25T16:18:38.273008Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:630: Add node #1026.v6 host2:1001 to epoch cache 2025-09-25T16:18:38.273029Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v6 to update nodes log >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> TTxAllocatorClientTest::InitiatingRequest [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TNodeBrokerTest::SubscribeToNodes [GOOD] >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> AutoConfig::GetASPoolsith1CPU [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TabletState::ExplicitUnsubscribe >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |81.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TResourceBroker::TestUpdateCookie [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-09-25T16:18:40.280238Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-09-25T16:18:40.280308Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-09-25T16:18:40.280399Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-09-25T16:18:40.280676Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.280749Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-09-25T16:18:40.282431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282454Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282468Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-09-25T16:18:40.282487Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282495Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282511Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-09-25T16:18:40.282535Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-09-25T16:18:40.282674Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-09-25T16:18:40.282752Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282758Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:40.282767Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-09-25T16:18:40.282771Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 0 to# 5000 |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-09-25T16:18:36.925408Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.928515Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.928602Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.928639Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.928688Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.933130Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.933195Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.933260Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.933315Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.933437Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.933517Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.933567Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.938188Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938338Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938469Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938551Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938636Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938685Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938709Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938755Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.938880Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939001Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939038Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939152Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939178Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939482Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939509Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939543Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939569Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939598Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939628Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939674Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939689Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939729Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939758Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939797Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939818Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.939888Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:36.939940Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.940160Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.940445Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.941388Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.941422Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.941466Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.941491Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.949509Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.949548Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.949567Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.950663Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.950704Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951375Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951643Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951673Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951719Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951820Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.951863Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.952739Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.953163Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.953208Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.953604Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 
18446744073709.551615s } 2025-09-25T16:18:36.953935Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.954339Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:36.975219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:36.975247Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:36.981501Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:36.981948Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:36.982016Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:36.982204Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:36.983152Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:36.983198Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:36.983289Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:36.983308Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:36.983315Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:36.983338Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:36.983364Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:36.983371Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:36.983377Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:36.983383Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2025-09-25T16:18:36.983403Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:133 ... 
607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:38.647083Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:38.647102Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:38.647118Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:38.647288Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2025-09-25T16:18:38.647300Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647316Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647487Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:788:2311] 2025-09-25T16:18:38.647494Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647502Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647514Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:787:2310] 2025-09-25T16:18:38.647519Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647525Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647542Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:790:2313] 2025-09-25T16:18:38.647547Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647553Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647646Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:791:2314] 2025-09-25T16:18:38.647650Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647654Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647694Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:792:2315] 2025-09-25T16:18:38.647697Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647701Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647764Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: 
StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:793:2316] 2025-09-25T16:18:38.647771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647778Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.647820Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:789:2312] 2025-09-25T16:18:38.647823Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.647827Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.750759Z node 1 :NODE_BROKER DEBUG: node_broker__update_epoch.cpp:31: TTxUpdateEpoch Complete 2025-09-25T16:18:38.750794Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:542: [Committed] Move to new epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z, approximate epoch start #4.8 2025-09-25T16:18:38.750824Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.023000Z 2025-09-25T16:18:38.750834Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z, approximate epoch start #4.8 nodes=4 expired=0 2025-09-25T16:18:38.750891Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z nodes=4 expired=0 removed=0 2025-09-25T16:18:38.750900Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v4 to update nodes log 2025-09-25T16:18:38.750910Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v5 to update nodes log 2025-09-25T16:18:38.750915Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v6 to update nodes log 2025-09-25T16:18:38.750922Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1027.v7 to update nodes log 2025-09-25T16:18:38.750936Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750947Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750956Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750963Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750971Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750979Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750986Z node 1 :NODE_BROKER 
TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.750994Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.782624Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:836:2346], Recipient [1:746:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.782681Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.782687Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.782700Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.782771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:838:2348], Recipient [1:746:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.782780Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.782785Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.782788Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.782854Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:840:2350], Recipient [1:746:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.782871Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.782876Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.782881Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.782942Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:842:2352], Recipient [1:746:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.782971Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-09-25T16:18:38.782976Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.782982Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.783032Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:844:2354], Recipient [1:746:2283]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.783043Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:38.783045Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.783049Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z 2025-09-25T16:18:38.783106Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:846:2356], Recipient [1:746:2283]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:38.783114Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2214], Recipient [1:746:2283]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-09-25T16:18:38.783117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:38.783120Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z - 1970-01-01T05:00:00.023000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] Test command err: 2025-09-25T16:18:40.207928Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:40.208088Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208100Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208110Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.208116Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208129Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:40.208140Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208145Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208150Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208158Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208162Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208167Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208174Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: 
Submitted new compaction0 task task-4 (4 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208179Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208183Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208190Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208194Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208199Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208205Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-6 (6 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208210Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208214Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208221Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208225Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208230Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208236Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-8 (8 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208241Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-8 (8 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208248Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208258Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-9 (9 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208263Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-9 (9 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208267Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208274Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-10 (10 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208279Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-10 (10 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208283Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208289Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-11 (11 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208293Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: 
Assigning waiting task task-11 (11 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208297Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208303Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-12 (12 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208307Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-12 (12 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208311Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208317Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-13 (13 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208321Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-13 (13 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208325Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208331Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-14 (14 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208335Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-14 (14 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208339Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208345Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-15 (15 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208348Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-15 (15 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208352Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208358Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-16 (16 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208362Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-16 (16 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208366Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208373Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-17 (17 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208377Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-17 (17 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208381Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208388Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-18 (18 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208393Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-18 (18 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208397Z node 1 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208404Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-19 (19 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208408Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-19 (19 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208413Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208419Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-20 (20 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208423Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-20 (20 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208428Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208435Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-21 (21 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208439Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-21 (21 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208444Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208450Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-22 (22 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208455Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-22 (22 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208459Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208466Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-23 (23 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208470Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-23 (23 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.208475Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.208481Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-24 (24 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.208485Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-24 (24 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.2 ... 
s to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209441Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-83 (83 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209445Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-83 (83 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209451Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209458Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-84 (84 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209463Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-84 (84 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209467Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209477Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-85 (85 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209481Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-85 (85 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209486Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209493Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-86 (86 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209497Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-86 (86 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209502Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209508Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-87 (87 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209513Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-87 (87 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209517Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209524Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-88 (88 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209528Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-88 (88 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209533Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209539Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-89 (89 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209544Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-89 (89 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209548Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209555Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task 
task-90 (90 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209560Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-90 (90 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209564Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209571Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-91 (91 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209575Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-91 (91 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209580Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209588Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-92 (92 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209592Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-92 (92 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209597Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209607Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-93 (93 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209611Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-93 (93 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209616Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209623Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-94 (94 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209627Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-94 (94 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209635Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209642Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-95 (95 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209647Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-95 (95 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209651Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209658Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-96 (96 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209663Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-96 (96 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209667Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209674Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-97 (97 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209678Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning 
waiting task task-97 (97 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209683Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209689Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-98 (98 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209696Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-98 (98 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209700Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209707Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-99 (99 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209711Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-99 (99 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209716Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209723Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-100 (100 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.209728Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-100 (100 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.209732Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209747Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-0 (0 by [1:104:2138]) priority=5 resources={100, 100} 2025-09-25T16:18:40.209753Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:404: Use ID 101 for submitted task 2025-09-25T16:18:40.209758Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-0 (0 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:40.209764Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-0 (0 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:40.209769Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-0 (0 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:40.209776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-0 (0 by [1:104:2138])) 2025-09-25T16:18:40.209781Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.209789Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-102 (102 by [1:104:2138]) priority=5 resources={100, 100} 2025-09-25T16:18:40.209793Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-102 (102 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:40.209798Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-102 (102 by [1:104:2138]) 2025-09-25T16:18:40.209802Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:40.209810Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task 
task-0 (0 by [1:104:2138]) (release resources {100, 100}) 2025-09-25T16:18:40.209817Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 10.000000 (remove task task-0 (0 by [1:104:2138])) 2025-09-25T16:18:40.209823Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 10.000000 2025-09-25T16:18:40.209830Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 (in-fly consumption {400, 400}) 2025-09-25T16:18:40.209835Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-102 (102 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:40.209840Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-102 (102 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:40.209845Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 10.000000 to 200.500000 (insert task task-102 (102 by [1:104:2138])) 2025-09-25T16:18:40.209850Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] Test command err: 2025-09-25T16:18:37.006771Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.009323Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.009380Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.009410Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.009448Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.012531Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.012581Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.012625Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.012666Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.012748Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.012798Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.012888Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.016626Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016692Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016732Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016795Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016884Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016906Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016926Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016944Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.016967Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017026Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017097Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017182Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017201Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017351Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017365Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017379Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017399Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017419Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017438Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017457Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017560Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.017599Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017617Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017640Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017655Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-09-25T16:18:37.017673Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017696Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.017768Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.018097Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.020089Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.020113Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.020129Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.024295Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.024325Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.024337Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025157Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025253Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025661Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025775Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025812Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025854Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.025922Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.026705Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.026990Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.027150Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.027387Z node 2 :NAMESERVICE 
DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.041219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:37.041239Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:37.046202Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:37.046744Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:37.046859Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:37.047122Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:37.047959Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:37.048109Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:37.048183Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-09-25T16:18:37.048201Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.048207Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:37.048226Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:37.048267Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:37.048274Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:37.048279Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:37.048285Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.048305Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:37.048311Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:37.091476Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:37.091520Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:37.091534Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:37.091546Z ... 
pp:84: TTxMigrateState Execute 2025-09-25T16:18:39.019494Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 1 2025-09-25T16:18:39.019499Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #4.11 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:39.019513Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:837: [DB] Removing node #1025.v11 from database 2025-09-25T16:18:39.019523Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:39.045286Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:39.045355Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.045371Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #4.11 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z, approximate epoch start #4.10 nodes=1 expired=0 2025-09-25T16:18:39.045409Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##4.11 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z nodes=1 expired=0 removed=2 2025-09-25T16:18:39.045418Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v9 to update nodes log 2025-09-25T16:18:39.045430Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1026.v10 to update nodes log 2025-09-25T16:18:39.045435Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1025.v11 to update nodes log 2025-09-25T16:18:39.045596Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:831:2345], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.045636Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:831:2345] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.045776Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:832:2346], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.045828Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:833:2347], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.045888Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:832:2346] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.045902Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:834:2348] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.045966Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:834:2348], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.045982Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:838:2352] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 
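
The TResourceBroker::TestAutoTaskId trace earlier in this output repeats one pattern: a task is submitted with a {cpu, memory} resource pair, assigned to a waiting queue, started only if the current in-fly usage plus the request still fits the quota, and otherwise left waiting ("Not enough resources to start task ...") until another task finishes and releases its resources. Below is a minimal, self-contained sketch of that admission pattern; the class and member names are invented for illustration and do not correspond to YDB's actual TResourceBroker API.

```cpp
#include <cstdint>
#include <deque>
#include <iostream>
#include <string>

// Hypothetical resource pair, mirroring the "{cpu, memory}" values in the trace.
struct TResources {
    uint64_t Cpu = 0;
    uint64_t Memory = 0;
};

// Toy broker: admit tasks while capacity allows, queue the rest, and release
// capacity when a task finishes. Illustrative only; not the YDB implementation.
class TToyBroker {
public:
    explicit TToyBroker(TResources limit) : Limit(limit) {}

    void Submit(std::string name, TResources req) {
        std::cout << "Submitted new task " << name << "\n";
        Waiting.push_back({std::move(name), req});
        TryStartWaiting();
    }

    void Finish(const std::string& name, TResources req) {
        // Release the resources held by the finished task and re-scan the queue.
        InFly.Cpu -= req.Cpu;
        InFly.Memory -= req.Memory;
        std::cout << "Finish task " << name << " (release resources)\n";
        TryStartWaiting();
    }

private:
    struct TTask {
        std::string Name;
        TResources Req;
    };

    bool Fits(const TResources& req) const {
        return InFly.Cpu + req.Cpu <= Limit.Cpu &&
               InFly.Memory + req.Memory <= Limit.Memory;
    }

    void TryStartWaiting() {
        // Start tasks in FIFO order; stop at the first task that does not fit,
        // which matches the "Not enough resources to start task ..." lines.
        while (!Waiting.empty() && Fits(Waiting.front().Req)) {
            TTask task = std::move(Waiting.front());
            Waiting.pop_front();
            InFly.Cpu += task.Req.Cpu;
            InFly.Memory += task.Req.Memory;
            std::cout << "Allocate resources for task " << task.Name << "\n";
        }
        if (!Waiting.empty()) {
            std::cout << "Not enough resources to start task "
                      << Waiting.front().Name << "\n";
        }
    }

    TResources Limit;
    TResources InFly;
    std::deque<TTask> Waiting;
};

int main() {
    // Quota of {500, 500}, tasks of {400, 400}: only one runs at a time,
    // like the queue_compaction0 tasks in the trace above.
    TToyBroker broker({500, 500});
    broker.Submit("task-1", {400, 400});
    broker.Submit("task-2", {400, 400});
    broker.Finish("task-1", {400, 400});  // task-2 is admitted once task-1 releases.
}
```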
2025-09-25T16:18:39.046009Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:833:2347] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.046021Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:836:2350] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.046033Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:837:2351] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.046046Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:835:2349], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046072Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:835:2349] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.046094Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:836:2350], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:837:2351], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046153Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:838:2352], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046218Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:792:2320]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 5 } 2025-09-25T16:18:39.046225Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046232Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046273Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:836:2350] 2025-09-25T16:18:39.046278Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046284Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046305Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:832:2346] 2025-09-25T16:18:39.046310Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046315Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046324Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:834:2348] 2025-09-25T16:18:39.046328Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046332Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046341Z node 1 
:NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:838:2352] 2025-09-25T16:18:39.046349Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046353Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046361Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:833:2347] 2025-09-25T16:18:39.046366Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046370Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046380Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:835:2349] 2025-09-25T16:18:39.046384Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046388Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046397Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:837:2351] 2025-09-25T16:18:39.046401Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046406Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #5 2025-09-25T16:18:39.046520Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:844:2358], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046563Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2218], Recipient [1:792:2320]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 10 SeqNo: 1 } 2025-09-25T16:18:39.046570Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:39.046579Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2218], seqNo: 1, version: 10, server pipe id: [1:844:2358] 2025-09-25T16:18:39.046590Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v10 -> v11 to [1:627:2218] 2025-09-25T16:18:39.046613Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:845:2359], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046627Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2218], Recipient [1:792:2320]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:39.046632Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046641Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-09-25T16:18:39.046757Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:849:2363], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046782Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, 
received event# 272039950, Sender [1:846:2360], Recipient [1:792:2320]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 0 SeqNo: 0 } 2025-09-25T16:18:39.046788Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-09-25T16:18:39.046793Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:846:2360], seqNo: 0, version: 0, server pipe id: [1:849:2363] 2025-09-25T16:18:39.046799Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v0 -> v11 to [1:846:2360] 2025-09-25T16:18:39.046818Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:850:2364], Recipient [1:792:2320]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.046832Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:734:2294], Recipient [1:792:2320]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:39.046836Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.046856Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #4.11 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> BootstrapperTest::KeepExistingTablet >> TabletState::ExplicitUnsubscribe [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-09-25T16:18:20.937429Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061927434226132:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:20.937460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041af/r3tmp/tmplnwO9n/pdisk_1.dat 2025-09-25T16:18:20.983560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.000625Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13289, node 1 2025-09-25T16:18:21.018706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.018724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.018727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-09-25T16:18:21.018788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:21.038568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.038598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:2970 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:18:21.040198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.052518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
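
The TNodeBrokerTest::SubscribeToNodes trace above revolves around versioned node lists: each node record carries a version (for example #1024.v9), clients report the version they already hold (CachedVersion in TEvListNodes / TEvSubscribeNodesRequest), and the broker replies with only the entries newer than that version ("Send TEvUpdateNodes v10 -> v11"). The sketch below shows that delta-by-version idea in isolation; the types and function are hypothetical stand-ins for illustration, not the actual NKikimr::NNodeBroker protocol.

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical entry of an "update nodes log": node id plus the version at
// which its record last changed (compare "Add node #1024.v9 to update nodes log").
struct TNodeLogEntry {
    uint32_t NodeId;
    uint64_t Version;
    std::string Change;  // e.g. "registered", "expired", "removed"
};

// Illustrative delta computation: given the version-ordered update log and the
// version a subscriber already has cached, return only the newer entries.
std::vector<TNodeLogEntry> BuildUpdate(const std::vector<TNodeLogEntry>& log,
                                       uint64_t cachedVersion) {
    std::vector<TNodeLogEntry> delta;
    for (const auto& entry : log) {
        if (entry.Version > cachedVersion) {
            delta.push_back(entry);
        }
    }
    return delta;
}

int main() {
    // Update log roughly shaped like the trace: versions 9..11.
    std::vector<TNodeLogEntry> log = {
        {1024, 9,  "registered"},
        {1026, 10, "registered"},
        {1025, 11, "removed"},
    };

    // A subscriber that reports CachedVersion: 10 only needs the v11 entry.
    for (const auto& e : BuildUpdate(log, 10)) {
        std::cout << "send update for node #" << e.NodeId
                  << ".v" << e.Version << " (" << e.Change << ")\n";
    }
    // A brand-new subscriber (CachedVersion: 0) receives the whole log.
    std::cout << "full update entries: " << BuildUpdate(log, 0).size() << "\n";
}
```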
2025-09-25T16:18:21.150075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041af/r3tmp/tmp2qKZbW/pdisk_1.dat 2025-09-25T16:18:22.474930Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:22.474978Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:22.560204Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23562, node 4 2025-09-25T16:18:22.570139Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:22.570150Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:22.570152Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:22.570196Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:22.571899Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:22.571921Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:22.573504Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:22.588468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:22.689337Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 2025-09-25T16:18:23.473933Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-09-25T16:18:28.155804Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061960694889011:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:28.155804Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061960694889022:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:28.155826Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:28.155892Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061960694889026:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:28.155903Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:28.156749Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:28.164687Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7554061960694889025:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:28.241438Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061960694889096:2711] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry a ... thState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 39 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 39 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 38 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 2025-09-25T16:18:39.683829Z node 16 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037888 not found 2025-09-25T16:18:39.683889Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# DropTableRequest, traceId# 01k60tpve3ef29sc4dckqa1hkx, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv4:127.0.0.1:48800, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 
undef 2025-09-25T16:18:39.684084Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [16:7554062006904182471:2145] Handle TEvProposeTransaction 2025-09-25T16:18:39.684095Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [16:7554062006904182471:2145] TxId# 281474976710672 ProcessProposeTransaction 2025-09-25T16:18:39.684105Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [16:7554062006904182471:2145] Cookie# 0 userReqId# "" txid# 281474976710672 SEND to# [16:7554062006904184347:3545] 2025-09-25T16:18:39.684401Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:39.684998Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [16:7554062006904184347:3545] txid# 281474976710672 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root/Dir" OperationType: ESchemeOpDropTable Drop { Name: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv4:127.0.0.1:48800" 2025-09-25T16:18:39.685013Z node 16 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [16:7554062006904184347:3545] txid# 281474976710672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:18:39.685029Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [16:7554062006904184347:3545] txid# 281474976710672 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:18:39.685141Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [16:7554062006904184347:3545] txid# 281474976710672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:18:39.685179Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [16:7554062006904184347:3545] HANDLE EvNavigateKeySetResult, txid# 281474976710672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:18:39.685191Z node 16 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [16:7554062006904184347:3545] txid# 281474976710672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-09-25T16:18:39.685227Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [16:7554062006904184347:3545] txid# 281474976710672 HANDLE EvClientConnected 2025-09-25T16:18:39.685283Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:493: TDropTable Propose, path: Root/Dir/Table-1, pathId: 0, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-09-25T16:18:39.685328Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:39.685854Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Dir/Table-1 2025-09-25T16:18:39.686034Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [16:7554062006904184347:3545] txid# 281474976710672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710672} 2025-09-25T16:18:39.686047Z node 16 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [16:7554062006904184347:3545] txid# 281474976710672 SEND to# [16:7554062006904184345:2376] 
Source {TEvProposeTransactionStatus txid# 281474976710672 Status# 53} 2025-09-25T16:18:39.689650Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817119738, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:39.690402Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976710672, done: 0, blocked: 1 2025-09-25T16:18:39.691534Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710672:0 2025-09-25T16:18:39.693079Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# DropTableRequest, traceId# 01k60tpved0sx0mpnk6rsmfn9w, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53728, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-09-25T16:18:39.693137Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [16:7554062006904182471:2145] Handle TEvProposeTransaction 2025-09-25T16:18:39.693143Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [16:7554062006904182471:2145] TxId# 281474976710673 ProcessProposeTransaction 2025-09-25T16:18:39.693153Z node 16 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [16:7554062006904182471:2145] Cookie# 0 userReqId# "" txid# 281474976710673 SEND to# [16:7554062006904184420:3614] 2025-09-25T16:18:39.694086Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [16:7554062006904184420:3614] txid# 281474976710673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root/Dir" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:53728" 2025-09-25T16:18:39.694093Z node 16 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [16:7554062006904184420:3614] txid# 281474976710673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:18:39.694118Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [16:7554062006904184420:3614] txid# 281474976710673 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:18:39.694208Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [16:7554062006904184420:3614] txid# 281474976710673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:18:39.694233Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [16:7554062006904184420:3614] HANDLE EvNavigateKeySetResult, txid# 281474976710673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:18:39.694240Z node 16 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [16:7554062006904184420:3614] txid# 281474976710673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-09-25T16:18:39.694272Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [16:7554062006904184420:3614] txid# 281474976710673 HANDLE EvClientConnected 2025-09-25T16:18:39.694319Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:493: TDropTable Propose, path: Root/Dir/Table-2, pathId: 0, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-09-25T16:18:39.694361Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, 
opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:39.694885Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Dir/Table-2 2025-09-25T16:18:39.694947Z node 16 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [16:7554062006904184420:3614] txid# 281474976710673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710673} 2025-09-25T16:18:39.694955Z node 16 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [16:7554062006904184420:3614] txid# 281474976710673 SEND to# [16:7554062006904184419:2379] Source {TEvProposeTransactionStatus txid# 281474976710673 Status# 53} 2025-09-25T16:18:39.695328Z node 16 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037890 not found 2025-09-25T16:18:39.696157Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:39.703077Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817119752, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:18:39.703713Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 281474976710673, done: 0, blocked: 1 2025-09-25T16:18:39.704522Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:18:39.704539Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:489: SchemeBoardUpdate /Root 2025-09-25T16:18:39.704571Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:18:39.704574Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:518: Can't update SecurityState for /Root - no PublicKeys 2025-09-25T16:18:39.705377Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710673:0 2025-09-25T16:18:39.706914Z node 16 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:595: Got grpc request# DeleteSessionRequest, traceId# 01k60tpvetcj86zgp2nkj2den0, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv4:127.0.0.1:48800, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.008917s 2025-09-25T16:18:39.709092Z node 16 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found 2025-09-25T16:18:39.709657Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |81.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots [GOOD] >> THiveTest::TestBridgeDemotion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] Test command err: 2025-09-25T16:18:40.380731Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor 
bootstrap 2025-09-25T16:18:40.380850Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:40.380860Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380867Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.380873Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380882Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:40.380892Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:40.380896Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380901Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.380906Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380911Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 800.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:40.380918Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:40.380922Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380927Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:40.380941Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:104:2138]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-09-25T16:18:40.380946Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380950Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:40.380957Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {200, 200}) 2025-09-25T16:18:40.380964Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 40.000000 (remove task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:40.380969Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 40.000000 2025-09-25T16:18:40.380974Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.380978Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: 
Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.380983Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 804.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:40.380988Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:40.380995Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:104:2138]) (priority=5 type=compaction0 resources={200, 200} resubmit=1) 2025-09-25T16:18:40.381001Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.381006Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.381010Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.381015Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 422.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:40.381019Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:40.381023Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:40.381028Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 422.000000 to 804.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:40.642443Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:40.642549Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:40.642563Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642577Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:40.642586Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642602Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:40.642622Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [2:105:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:40.642627Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642633Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:40.642641Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:40.642649Z node 2 :RESOURCE_BROKER 
DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {400, 400}) 2025-09-25T16:18:40.642656Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 0.000000 (remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:40.642662Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:40.642667Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642672Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 380.000000 (insert task task-2 (2 by [2:105:2138])) 2025-09-25T16:18:40.642733Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:40.642741Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642746Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:40.642751Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642757Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 380.000000 to 760.000000 (insert task task-3 (3 by [2:105:2138])) 2025-09-25T16:18:40.642766Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:40.642771Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [2:105:2138]) (release resources {200, 200}) 2025-09-25T16:18:40.642776Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 760.000000 to 380.000000 (remove task task-3 (3 by [2:105:2138])) 2025-09-25T16:18:40.642786Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [2:105:2138]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-09-25T16:18:40.642793Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642799Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:40.642803Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:40.642809Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 720.000000 (insert task task-2 (2 by [2:105:2138])) |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::ExplicitUnsubscribe [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] Test command err: 2025-09-25T16:18:37.018939Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.022039Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.022095Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.022125Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.022165Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.026004Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.026053Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.026109Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.026158Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.026266Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.026328Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.026360Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.031895Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032005Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032063Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032117Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032207Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032232Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032250Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032269Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032305Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032387Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032501Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032637Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032660Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032913Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032938Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032958Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.032997Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.033022Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.033044Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.033070Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033250Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-09-25T16:18:37.033310Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033339Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033376Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033402Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033431Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033461Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.033947Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.034020Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.036430Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.036483Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.036510Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.043067Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.043361Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.043394Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.043418Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.043955Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.044129Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.044306Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.044338Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.044410Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.044596Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.045668Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.045850Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-09-25T16:18:37.067266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:37.067290Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-09-25T16:18:37.072585Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:18:37.073105Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:18:37.073182Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-09-25T16:18:37.073425Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:18:37.074136Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-09-25T16:18:37.074276Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-09-25T16:18:37.074350Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-09-25T16:18:37.074367Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.074373Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-09-25T16:18:37.074390Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-09-25T16:18:37.074428Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-09-25T16:18:37.074433Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-09-25T16:18:37.074438Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-09-25T16:18:37.074444Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-09-25T16:18:37.074459Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-09-25T16:18:37.074466Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-09-25T16:18:37.117346Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-09-25T16:18:37.117378Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-09-25T16:18:37.117387Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-09-25T16:18:37.117396Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z nodes=0 expired=0 removed=0 2025-09-25T16:18:37.117595Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:587:2206], Rec ... 
25-09-25T16:18:39.345030Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1548.v504 to update nodes log 2025-09-25T16:18:39.345034Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1547.v504 to update nodes log 2025-09-25T16:18:39.345039Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1546.v504 to update nodes log 2025-09-25T16:18:39.345044Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1545.v504 to update nodes log 2025-09-25T16:18:39.345049Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1544.v504 to update nodes log 2025-09-25T16:18:39.345053Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1543.v504 to update nodes log 2025-09-25T16:18:39.345058Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1542.v504 to update nodes log 2025-09-25T16:18:39.345062Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1541.v504 to update nodes log 2025-09-25T16:18:39.345067Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1540.v504 to update nodes log 2025-09-25T16:18:39.345072Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1537.v504 to update nodes log 2025-09-25T16:18:39.345077Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1787.v504 to update nodes log 2025-09-25T16:18:39.345338Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2833:3854], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345473Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2833:3854] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345514Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2837:3858], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345587Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2838:3859], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345622Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2839:3860], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345657Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2834:3855] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345668Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2835:3856] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345677Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2834:3855], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345682Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2836:3857] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345692Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2837:3858] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 
2025-09-25T16:18:39.345700Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2838:3859] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345708Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2839:3860] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345732Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:2840:3861] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-09-25T16:18:39.345744Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2835:3856], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345766Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2836:3857], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345778Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2840:3861], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.345847Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:2768:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-09-25T16:18:39.345853Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.345865Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.345875Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:2835:3856] 2025-09-25T16:18:39.345878Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.345883Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.349353Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:2836:3857] 2025-09-25T16:18:39.349376Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.349402Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.353314Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:2837:3858] 2025-09-25T16:18:39.353336Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.353351Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.353416Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], 
Recipient [1:2834:3855] 2025-09-25T16:18:39.353420Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.353426Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.358802Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:2838:3859] 2025-09-25T16:18:39.358829Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.358860Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.361390Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:2839:3860] 2025-09-25T16:18:39.361411Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.361425Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.366878Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:2840:3861] 2025-09-25T16:18:39.366904Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.366920Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.370371Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2846:3867], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.370431Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:2768:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:39.370437Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.370447Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.375101Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2848:3869], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.375276Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:2768:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:39.375282Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.375294Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.383151Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2850:3871], Recipient [1:2768:3803]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.383217Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:2768:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-09-25T16:18:39.383224Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.383238Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z 2025-09-25T16:18:39.385295Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:2852:3873], Recipient [1:2768:3803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:39.385348Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:625:2218], Recipient [1:2768:3803]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 503 } 2025-09-25T16:18:39.385354Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-09-25T16:18:39.385364Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #3.504 1970-01-01T02:00:00.026000Z - 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" { LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2025-09-25T16:18:40.890632Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> TTabletPipeTest::TestSendAfterOpen >> TPipeCacheTest::TestIdleRefresh |81.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo >> TTabletPipeTest::TestPipeConnectToHint >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TResourceBroker::TestOverusage >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestDeleteTabletError >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TTabletPipeTest::TestSendWithoutWaitOpen >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-09-25T16:18:41.120268Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2025-09-25T16:18:41.120488Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-09-25T16:18:41.120497Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2025-09-25T16:18:41.275407Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... 
waiting for multiple state storage lookup attempts (done) |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries >> TTabletResolver::TabletResolvePriority [GOOD] >> TResourceBroker::TestQueueWithConfigure >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> THiveTest::TestFollowerCompatability3 [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> THiveTest::TestGetStorageInfo >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TResourceBroker::TestOverusage [GOOD] >> TPipeCacheTest::TestTabletNode [GOOD] >> TTabletResolver::NodeProblem >> TResourceBroker::TestNotifyActorDied >> TabletState::SeqNoSubscriptionReplace |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TResourceBroker::TestErrors |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] |81.5%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TabletState::SeqNoSubscriptionReplace [GOOD] >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> TabletState::SeqNoSubscribeOutOfOrder >> TResourceBroker::TestRealUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 
Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] 
IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437185 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2142] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2142] Leader for TabletID 9437184 is [1:121:2149] sender: [1:122:2057] recipient: [1:109:2141] Leader for TabletID 9437185 is [1:124:2151] sender: [1:126:2057] recipient: [1:110:2142] Leader for TabletID 9437184 is [1:121:2149] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:124:2151] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:124:2151] sender: [1:166:2057] recipient: [1:106:2140] Leader for TabletID 9437185 is [1:124:2151] sender: [1:167:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:124:2151] sender: [1:170:2057] recipient: [1:169:2180] Leader for TabletID 9437185 is [1:171:2181] sender: [1:172:2057] recipient: [1:169:2180] Leader for TabletID 9437185 is [1:171:2181] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:121:2149] sender: [1:204:2057] recipient: [1:105:2139] Leader for TabletID 9437184 is [1:121:2149] sender: [1:207:2057] recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:209:2057] recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:237:2057] recipient: [1:14:2061] |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] >> THiveTest::TestExternalBootWhenLocked >> TResourceBrokerConfig::UpdateTasks [GOOD] >> YdbOlapStore::LogPagingAfter [GOOD] >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> THiveTest::TestBridgeDemotion [GOOD] >> THiveTest::TestBridgeBalance >> TTabletResolver::NodeProblem [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] Test command err: 2025-09-25T16:18:42.451840Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:42.451986Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=5 resources={50, 50} 
2025-09-25T16:18:42.451997Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.452006Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:42.452012Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.452025Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 100.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:42.452036Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={410, 410} 2025-09-25T16:18:42.452040Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.452046Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_compaction0 due to exceeded limits 2025-09-25T16:18:42.452053Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:104:2138]) priority=5 resources={550, 550} 2025-09-25T16:18:42.452057Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:42.452063Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:42.452067Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:42.452083Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {50, 50}) 2025-09-25T16:18:42.452090Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-09-25T16:18:42.452096Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {550, 550} for task task-3 (3 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:42.452100Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:42.452105Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1100.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:42.452110Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.452117Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:104:2138]) (release resources {550, 550}) 2025-09-25T16:18:42.452124Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1100.000000 to 550.000000 (remove task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:42.452129Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 550.000000 2025-09-25T16:18:42.452134Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {410, 410} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 
2025-09-25T16:18:42.452138Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.452143Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 920.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:42.716472Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:42.716541Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.716548Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:42.716554Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:42.716558Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:42.716566Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:42.716573Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:42.716576Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:42.716579Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:42.716584Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:106:2139]) priority=5 resources={200, 200} 2025-09-25T16:18:42.716587Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:106:2139]) to queue queue_compaction0 2025-09-25T16:18:42.716589Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:42.716592Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:42.716596Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:106:2139]) priority=5 resources={200, 200} 2025-09-25T16:18:42.716598Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:106:2139]) to queue queue_compaction1 2025-09-25T16:18:42.716601Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:42.716603Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:42.716613Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:42.716617Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 500}) 2025-09-25T16:18:42.716621Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 100.000000 
(remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:42.716625Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-09-25T16:18:42.716628Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-4 (4 by [2:106:2139]) from queue queue_compaction1 2025-09-25T16:18:42.716631Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [2:106:2139]) to queue queue_compaction1 2025-09-25T16:18:42.716634Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 400.000000 (insert task task-4 (4 by [2:106:2139])) 2025-09-25T16:18:42.716637Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:106:2139]) from queue queue_compaction0 2025-09-25T16:18:42.716643Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:106:2139]) to queue queue_compaction0 2025-09-25T16:18:42.716646Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 500.000000 (insert task task-3 (3 by [2:106:2139])) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2025-09-25T16:18:42.378753Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2025-09-25T16:18:42.383123Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-09-25T16:18:42.385028Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-09-25T16:18:42.385802Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:133:2158] 2025-09-25T16:18:42.385815Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:133:2158] 2025-09-25T16:18:42.385878Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:133:2158] 2025-09-25T16:18:42.385887Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:133:2158] 2025-09-25T16:18:42.385901Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:133:2158] 2025-09-25T16:18:42.385907Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:133:2158] 2025-09-25T16:18:42.385920Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:133:2158] 2025-09-25T16:18:42.385948Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:133:2158] Type# 269877249 Reason# ActorUnknown 2025-09-25T16:18:42.385973Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:136:2160] 2025-09-25T16:18:42.385977Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:136:2160] 2025-09-25T16:18:42.385986Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:136:2160] 2025-09-25T16:18:42.385991Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:136:2160] 2025-09-25T16:18:42.385999Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:136:2160] 2025-09-25T16:18:42.386003Z node 1 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:136:2160] 2025-09-25T16:18:42.386011Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:136:2160] 2025-09-25T16:18:42.386024Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:136:2160] Type# 269877249 Reason# ActorUnknown 2025-09-25T16:18:42.386043Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:138:2162] 2025-09-25T16:18:42.386048Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:138:2162] 2025-09-25T16:18:42.386056Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:138:2162] 2025-09-25T16:18:42.386061Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:138:2162] 2025-09-25T16:18:42.386068Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:138:2162] 2025-09-25T16:18:42.386072Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:138:2162] 2025-09-25T16:18:42.386079Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:138:2162] 2025-09-25T16:18:42.386092Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:138:2162] Type# 269877249 Reason# ActorUnknown >> BootstrapperTest::DuplicateNodes [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... waiting for client destroyed notification ... waiting for connect2 |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... 
waiting for connect2 |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-09-25T16:18:42.774211Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:42.774344Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.774354Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.774364Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:42.774370Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.774380Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:42.774391Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:42.774396Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.774404Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.774412Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:42.774416Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:42.774421Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:42.774426Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:42.774671Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_compaction1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-09-25T16:18:42.774720Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.774728Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_default from 0.000000 to 175881712277.399994 2025-09-25T16:18:42.774734Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: 
Updated planned resource usage for queue queue_default from 0.000000 to 500.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:42.774739Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.774744Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:42.774749Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:42.774754Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.774760Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:42.774769Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:42.774773Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:42.774778Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:42.774787Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-09-25T16:18:42.774831Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-09-25T16:18:42.774841Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-09-25T16:18:42.774892Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown1" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-09-25T16:18:42.774902Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2025-09-25T16:18:42.774938Z node 1 :RESOURCE_BROKER INFO: 
resource_broker.cpp:1195: New config: Queues { Name: "queue_default1" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-09-25T16:18:42.774945Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" 2025-09-25T16:18:42.774979Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-09-25T16:18:42.775017Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.775024Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_default from 0.000000 to 175881712277.399994 2025-09-25T16:18:42.775030Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 500.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:42.775034Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.775040Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_compaction0 from 0.000000 to 7035268491.096001 2025-09-25T16:18:42.775045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:42.775050Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.775056Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 500.000000 to 175881713077.399994 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:42.775061Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-09-25T16:18:43.042051Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:43.042151Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={500, 0} 2025-09-25T16:18:43.042162Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: 
Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.042170Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 0} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:43.042176Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.042190Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.042200Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={500, 0} 2025-09-25T16:18:43.042204Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.042210Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.042224Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 0}) 2025-09-25T16:18:43.042231Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 2025-09-25T16:18:43.042236Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 0} for task task-2 (2 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.042241Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.042246Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1000.000000 (insert task task-2 (2 by [2:105:2138])) 2025-09-25T16:18:43.042254Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [2:105:2138]) (release resources {500, 0}) 2025-09-25T16:18:43.042261Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1000.000000 to 500.000000 (remove task task-2 (2 by [2:105:2138])) 2025-09-25T16:18:43.042266Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 500.000000 2025-09-25T16:18:43.042272Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [2:105:2138]) priority=5 resources={250, 0} 2025-09-25T16:18:43.042276Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.042281Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 0} for task task-3 (3 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.042285Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.042291Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 500.000000 to 987.500000 (insert task task-3 (3 by [2:105:2138])) 2025-09-25T16:18:43.042299Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new scan task task-4 (4 by [2:105:2138]) priority=5 
resources={0, 800} 2025-09-25T16:18:43.042304Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_scan 2025-09-25T16:18:43.042313Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction1 from 500.000000 to 750.000000 (in-fly consumption {250, 0}) 2025-09-25T16:18:43.042317Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [2:105:2138]) 2025-09-25T16:18:43.042323Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={250, 0} 2025-09-25T16:18:43.042327Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.042333Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction1 from 750.000000 to 1000.000000 (in-fly consumption {250, 0}) 2025-09-25T16:18:43.042337Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [2:105:2138]) 2025-09-25T16:18:43.042341Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 0} for task task-5 (5 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:43.042345Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.042350Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 1500.000000 (insert task task-5 (5 by [2:105:2138])) >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.6%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:168:2058] recipient: [3:166:2141] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:168:2058] recipient: [3:166:2141] Leader for TabletID 9437184 is [3:174:2145] sender: [3:175:2058] recipient: [3:166:2141] Leader for TabletID 9437185 is [0:0:0] sender: [4:178:2049] recipient: [4:169:2098] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:178:2049] recipient: [4:169:2098] Leader for TabletID 9437185 is [4:190:2101] sender: [4:191:2049] recipient: [4:169:2098] Leader for TabletID 9437184 is [3:174:2145] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [3:220:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [4:222:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:190:2101] sender: [3:225:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [4:223:2049] recipient: [4:163:2097] Leader for TabletID 9437185 is [4:190:2101] sender: [4:228:2049] recipient: [4:227:2114] Leader for TabletID 9437185 is [4:229:2115] sender: [4:230:2049] recipient: [4:227:2114] Leader for TabletID 9437185 is [4:229:2115] sender: [3:260:2058] recipient: [3:15:2062] |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.6%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestExternalBootWhenLocked [GOOD] >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] Test command err: 2025-09-25T16:18:42.931773Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:42.931882Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:42.931892Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.931901Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:42.931907Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:42.931919Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 800.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:42.931935Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.931940Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.931946Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.931953Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.931957Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.931962Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.931969Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-4 (4 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.931973Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.931977Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.931983Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:42.931988Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:408: SubmitTask failed for task 2 to [1:104:2138]: task with the same ID has been already submitted 2025-09-25T16:18:42.932002Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:494: RemoveQueuedTask failed 
for task 1 to [1:104:2138]: cannot remove in-fly task 2025-09-25T16:18:42.932010Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:486: RemoveQueuedTask failed for task 5 to [1:104:2138]: cannot remove unknown task 2025-09-25T16:18:42.932017Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:486: RemoveQueuedTask failed for task 2 to [1:105:2139]: cannot remove unknown task 2025-09-25T16:18:42.932024Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:42.932030Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:42.932037Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:433: UpdateTask failed for task 2 to [1:104:2138]: cannot update unknown task 2025-09-25T16:18:42.932045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:433: UpdateTask failed for task 4 to [1:105:2139]: cannot update unknown task 2025-09-25T16:18:42.932052Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-4 (4 by [1:104:2138]) (priority=4 type=compaction0 resources={250, 250} resubmit=0) 2025-09-25T16:18:42.932058Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.932064Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:104:2138]) 2025-09-25T16:18:42.932070Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-3 (3 by [1:104:2138]) (priority=6 type=compaction0 resources={250, 250} resubmit=0) 2025-09-25T16:18:42.932074Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.932079Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:104:2138]) 2025-09-25T16:18:42.932086Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 5 to [1:104:2138]: cannot finish unknown task 2025-09-25T16:18:42.932093Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 2 to [1:105:2139]: cannot finish unknown task 2025-09-25T16:18:42.932100Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:523: FinishTask failed for task 3 to [1:104:2138]: cannot finish queued task 2025-09-25T16:18:42.932108Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {400, 400}) 2025-09-25T16:18:42.932115Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 0.000000 to 1600.000000 2025-09-25T16:18:42.932120Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-4 (4 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:42.932124Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.932129Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 500.000000 (insert task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:42.932135Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_compaction0 due to exceeded limits 2025-09-25T16:18:42.932142Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:104:2138]) 
(release resources {250, 250}) 2025-09-25T16:18:42.932148Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 500.000000 2025-09-25T16:18:42.932152Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-3 (3 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:42.932157Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:42.932162Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 500.000000 to 1000.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:43.253607Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:43.253700Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [2:105:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:43.253711Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_default 2025-09-25T16:18:43.253719Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:43.253725Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_default 2025-09-25T16:18:43.253738Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.253748Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253753Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.253758Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253765Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253769Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.253774Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253778Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.253784Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253788Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.253793Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253796Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.253803Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new 
compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253807Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.253811Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253815Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.253821Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253825Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.253830Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253834Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.253840Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.253847Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.253851Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.253855Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.253868Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 500}) 2025-09-25T16:18:43.253875Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated plann ... 
urce_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254705Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1150.000000 to 1207.500000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254711Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-09-25T16:18:43.254716Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1207.500000 to 1200.000000 (remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254721Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1150.000000 to 1200.000000 2025-09-25T16:18:43.254727Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-1 (1 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254731Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254735Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.254739Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254744Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1200.000000 to 1255.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254763Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-09-25T16:18:43.254769Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1255.000000 to 1250.000000 (remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254773Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1200.000000 to 1250.000000 2025-09-25T16:18:43.254780Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-1 (1 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254784Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254788Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.254792Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254796Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1250.000000 to 1302.500000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254802Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-09-25T16:18:43.254807Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1302.500000 to 1300.000000 (remove task task-1 (1 by [2:105:2138])) 
2025-09-25T16:18:43.254812Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1250.000000 to 1300.000000 2025-09-25T16:18:43.254818Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [2:105:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:43.254822Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_default 2025-09-25T16:18:43.254827Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:43.254831Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_default 2025-09-25T16:18:43.254836Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 950.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254842Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254846Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254850Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254856Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254860Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.254880Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_compaction0 from 300.000000 to 1300.000000 2025-09-25T16:18:43.254885Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254890Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.254897Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254901Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254905Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254908Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.254914Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254918Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.254922Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254926Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 
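The RESOURCE_BROKER trace above follows a consistent cycle: a task is submitted with a type, a priority and a {cpu, memory} pair, parked as waiting in the queue for its type, started once resources allow, and accounted against that queue's planned usage; when an earlier queue's head task cannot start, later queues are skipped ("blocked by an earlier queue"). The following is a deliberately simplified, self-contained sketch of that scheduling pattern; the class names, the cost formula and the queue-ordering rule are assumptions for illustration, not the actual NKikimr resource broker.

// Simplified, self-contained sketch of the scheduling pattern visible in the
// RESOURCE_BROKER trace above. Class names, the cost formula and the queue
// ordering are illustrative assumptions, not the actual NKikimr broker.
#include <algorithm>
#include <cstdint>
#include <deque>
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct TResources { uint64_t Cpu = 0; uint64_t Memory = 0; };

struct TTask {
    uint64_t Id = 0;
    TResources Need;
};

struct TQueue {
    std::string Name;
    double PlannedUsage = 0;    // grows when a task starts, shrinks when it finishes
    std::deque<TTask> Waiting;  // FIFO within one queue
};

class TBrokerSketch {
public:
    explicit TBrokerSketch(TResources limit) : Limit(limit) {}

    void Submit(const std::string& queue, TTask task) {
        Queues[queue].Name = queue;
        Queues[queue].Waiting.push_back(task);   // "Assigning waiting task ..."
        TryStart();
    }

    void Finish(const std::string& queue, const TTask& task) {
        Used.Cpu -= task.Need.Cpu;               // "Finish task ... (release resources ...)"
        Used.Memory -= task.Need.Memory;
        Queues[queue].PlannedUsage -= Cost(task);
        TryStart();                              // released resources may unblock waiters
    }

private:
    static double Cost(const TTask& t) {
        // Assumed cost model: just the CPU share. The real broker also weighs
        // queue configuration and elapsed time into its usage figures.
        return static_cast<double>(t.Need.Cpu);
    }

    bool Fits(const TResources& need) const {
        return Used.Cpu + need.Cpu <= Limit.Cpu &&
               Used.Memory + need.Memory <= Limit.Memory;
    }

    void TryStart() {
        // Visit queues from least to most planned usage; if the head of an
        // earlier queue cannot start, later queues are skipped, matching the
        // "blocked by an earlier queue" messages in the trace.
        std::vector<TQueue*> order;
        for (auto& [name, q] : Queues) order.push_back(&q);
        std::sort(order.begin(), order.end(),
                  [](const TQueue* a, const TQueue* b) { return a->PlannedUsage < b->PlannedUsage; });
        for (TQueue* q : order) {
            while (!q->Waiting.empty() && Fits(q->Waiting.front().Need)) {
                TTask t = q->Waiting.front();
                q->Waiting.pop_front();
                Used.Cpu += t.Need.Cpu;          // "Allocate resources ... in-fly"
                Used.Memory += t.Need.Memory;
                q->PlannedUsage += Cost(t);
                std::cout << "start task-" << t.Id << " from " << q->Name << "\n";
            }
            if (!q->Waiting.empty())
                break;                           // earlier queue blocked: stop here
        }
    }

    TResources Limit, Used;
    std::map<std::string, TQueue> Queues;
};

int main() {
    TBrokerSketch broker({/*Cpu=*/500, /*Memory=*/500});
    broker.Submit("queue_compaction0", {1, {400, 400}});  // starts immediately
    broker.Submit("queue_compaction1", {2, {400, 400}});  // waits: not enough resources
    broker.Finish("queue_compaction0", {1, {400, 400}});  // task-2 starts now
}

Compiled standalone, submitting two {400, 400} tasks against a {500, 500} limit reproduces the same shape as the trace: the second task waits until the first finishes and releases its resources.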
2025-09-25T16:18:43.254932Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254936Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254940Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254943Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.254949Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [2:105:2138]) priority=5 resources={50, 50} 2025-09-25T16:18:43.254953Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.254957Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.254961Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.254966Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 500}) 2025-09-25T16:18:43.254972Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 950.000000 to 0.000000 (remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.254976Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-2 (2 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.254980Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.254985Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1300.000000 to 1350.000000 (insert task task-2 (2 by [2:105:2138])) 2025-09-25T16:18:43.254990Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-3 (3 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:43.254994Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.254999Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 300.000000 to 1400.000000 (insert task task-3 (3 by [2:105:2138])) 2025-09-25T16:18:43.255005Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-4 (4 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.255009Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.255014Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1350.000000 to 1400.000000 (insert task task-4 (4 by [2:105:2138])) 2025-09-25T16:18:43.255018Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-6 (6 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.255022Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 
by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.255027Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1400.000000 to 1450.000000 (insert task task-6 (6 by [2:105:2138])) 2025-09-25T16:18:43.255032Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-5 (5 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:43.255036Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.255040Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1400.000000 to 1500.000000 (insert task task-5 (5 by [2:105:2138])) 2025-09-25T16:18:43.255045Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-7 (7 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:43.255049Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.255054Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1500.000000 to 1600.000000 (insert task task-7 (7 by [2:105:2138])) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-09-25T16:18:42.835975Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836012Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:219:2140] followers: 0 2025-09-25T16:18:42.836024Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:219:2140] 2025-09-25T16:18:42.836049Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836065Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:225:2144] followers: 0 2025-09-25T16:18:42.836070Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:225:2144] 2025-09-25T16:18:42.836186Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:219:2140] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836190Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:219:2140] 2025-09-25T16:18:42.836205Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:225:2144] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836208Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 
leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:225:2144] 2025-09-25T16:18:42.836224Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 5 2025-09-25T16:18:42.836229Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:219:2140] by nodeId 2025-09-25T16:18:42.836233Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:219:2140] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836236Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:42.836254Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:235:2097] followers: 0 2025-09-25T16:18:42.836259Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:235:2097] 2025-09-25T16:18:42.836293Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:225:2144] by nodeId 2025-09-25T16:18:42.836297Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:225:2144] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836299Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:42.836316Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:241:2099] followers: 0 2025-09-25T16:18:42.836320Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-09-25T16:18:42.836488Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-09-25T16:18:42.836494Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:235:2097] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836497Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:235:2097] 2025-09-25T16:18:42.836516Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836519Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-09-25T16:18:42.836535Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2025-09-25T16:18:42.836539Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation 
of tabletId: 123 leader: [2:235:2097] by nodeId 2025-09-25T16:18:42.836543Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:235:2097] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836545Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:42.836568Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:253:2097] followers: 0 2025-09-25T16:18:42.836572Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:253:2097] 2025-09-25T16:18:42.836619Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836623Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-09-25T16:18:42.836641Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 8 2025-09-25T16:18:42.836647Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:253:2097] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836650Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:253:2097] 2025-09-25T16:18:42.836681Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:241:2099] by nodeId 2025-09-25T16:18:42.836687Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:42.836691Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:42.836729Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:259:2099] followers: 0 2025-09-25T16:18:42.836736Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:259:2099] >> BootstrapperTest::LoneBootstrapper >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TResourceBroker::TestRandomQueue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-09-25T16:18:42.318726Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:42.318752Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:42.318910Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-09-25T16:18:42.318919Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-09-25T16:18:42.318928Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-09-25T16:18:42.318932Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-09-25T16:18:42.319125Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-09-25T16:18:42.319134Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-09-25T16:18:42.319186Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-09-25T16:18:42.319193Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.123966s 2025-09-25T16:18:42.505011Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:42.505222Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:224:2098] 2025-09-25T16:18:42.505326Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:42.505332Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry >> TResourceBroker::TestCounters >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert >> TResourceBrokerInstant::TestMerge >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TReplicationTests::Create >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TReplicationTests::CreateSequential >> TTabletPipeTest::TestShutdown >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::ColumnShardCounters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-09-25T16:18:21.635619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061928558038859:2153];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.635641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a7/r3tmp/tmp8IzkpA/pdisk_1.dat 2025-09-25T16:18:21.706440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.719905Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62641, node 1 2025-09-25T16:18:21.740179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.740208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.742221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:21.757216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.757228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.757231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.757281Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.814052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:9641 2025-09-25T16:18:21.899183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) waiting... 
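The TX_COLUMNSHARD warnings that follow list an ordered chain of normalizers registered during TTxInitSchema (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on), each announced with a normalizer_register event before the shard starts serving. As a rough illustration of that registration pattern only — the type and function names below are hypothetical, not the actual column-shard interfaces — such a chain can be modeled as a list of named startup steps run in registration order:

// Illustrative sketch of an ordered "normalizer" chain registered at tablet
// init, mirroring the normalizer_register events in the log below. All names
// here are hypothetical; the real column shard has its own interfaces.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TLocalDb {};  // stand-in for the tablet's local state

class TNormalizerChain {
public:
    void Register(std::string name, std::function<void(TLocalDb&)> step) {
        std::cout << "register normalizer: " << name << "\n";
        Steps.push_back({std::move(name), std::move(step)});
    }

    // Run every registered step once, in registration order, at startup.
    void RunAll(TLocalDb& db) const {
        for (const auto& s : Steps) {
            std::cout << "run normalizer: " << s.Name << "\n";
            s.Step(db);
        }
    }

private:
    struct TEntry { std::string Name; std::function<void(TLocalDb&)> Step; };
    std::vector<TEntry> Steps;
};

int main() {
    TNormalizerChain chain;
    TLocalDb db;
    // Registration order matters: later steps may rely on earlier ones.
    chain.Register("Granules", [](TLocalDb&) { /* rebuild granule index */ });
    chain.Register("Chunks",   [](TLocalDb&) { /* verify chunk metadata  */ });
    chain.RunAll(db);
}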
2025-09-25T16:18:21.920994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:21.921061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:21.921107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:21.921139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:21.921160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:21.921184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:21.921206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:21.921225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:21.921249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:21.921271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:21.921301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:21.921327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:18:21.921354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7554061928558039780:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:18:21.926714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:21.926758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:21.926802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:21.926823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:21.926843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:21.926865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:21.926892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:21.926917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:21.926942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:21.926968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:21.926990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:21.927010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:18:21.927030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7554061928558039779:2308];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:18:21.932276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7554061928558039781:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:21.932307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7554061928558039781:2310];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:21.932349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7554061928558039781:2310];tabl ... 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7554062022059137898:3084], task: 15, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 112 Tasks { TaskId: 15 CpuTimeUs: 153 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 66 BuildCpuTimeUs: 87 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-v6cxduzo2m" NodeId: 28 CreateTimeMs: 1758817122594 CurrentWaitInputTimeUs: 5 UpdateTimeMs: 1758817122601 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:42.606037Z node 28 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[28:7554062017764168225:2309];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715670;scan_id=40;gen=1;table=/Root/OlapStore/log1;snapshot={1758817122524:max};tablet=72075186224037889;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 11 } Constant { Bytes: "app" } } } Command { Assign { Column { Id: 12 } Function { Arguments { Id: 6 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 13 } Constant { Bytes: "resource_1" } } } Command { Assign { Column { Id: 14 } Function { Arguments { Id: 3 } Arguments { Id: 13 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 11 } } } Command { Assign { Column { Id: 15 } Function { Arguments { Id: 12 } Arguments { Id: 14 } FunctionType: YQL_KERNEL KernelIdx: 2 YqlOperationId: 0 } } } Command { Filter { Predicate { Id: 15 } } } Command { Projection { Columns { Id: 7 } Columns { Id: 1 } Columns { Id: 3 } Columns { Id: 6 } Columns { Id: 5 } Columns { Id: 4 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\006\203\001H\203\005@\203\014\213\006?\004?\004\203\014\001\235?\006\001\235?\n\001\n\000\t\211\006?\020\235?\000\001\235?\002\000\235?\004\001\n\000\t\251\000?\026\002\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\006?\022?\032?\032\235?\010\001\n\000\t\211\006?\032\203\005@?\026?\030\006\000\003?0\014Equals? ?$\001\t\211\006?\032\203\005@?\026?\030\006\000\003?8\014Equals? 
?$\001\t\211\004?,?\032?\032 BlockAnd\000?(?(\000\000\000/" ; 2025-09-25T16:18:42.606062Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:698: ActorId: [28:7554062022059137877:3062] TxId: 281474976715670. Ctx: { TraceId: 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. Waiting for: CA [28:7554062022059137937:3117], CA [28:7554062022059137900:3086], CA [28:7554062022059137894:3081], CA [28:7554062022059137889:3076], CA [28:7554062022059137929:3110], CA [28:7554062022059137941:3120], CA [28:7554062022059137935:3115], CA [28:7554062022059137898:3084], CA [28:7554062022059137892:3079], CA [28:7554062022059137904:3089], CA [28:7554062022059137944:3123], CA [28:7554062022059137939:3118], CA [28:7554062022059137907:3092], CA [28:7554062022059137933:3113], CA [28:7554062022059137901:3087], CA [28:7554062022059137942:3121], CA [28:7554062022059137905:3090], CA [28:7554062022059137946:3124], CA [28:7554062022059137908:3093], CA [28:7554062022059137955:3132], CA [28:7554062022059137949:3127], CA [28:7554062022059137918:3101], CA [28:7554062022059137912:3096], CA [28:7554062022059137958:3135], CA [28:7554062022059137953:3130], CA [28:7554062022059137910:3094], CA [28:7554062022059137922:3104], CA [28:7554062022059137916:3099], CA [28:7554062022059137956:3133], CA [28:7554062022059137920:3102], CA [28:7554062022059137926:3107], CA [28:7554062022059137883:3071], CA [28:7554062022059137923:3105], CA [28:7554062022059137887:3074], CA [28:7554062022059137895:3082], CA [28:7554062022059137885:3072], CA [28:7554062022059137890:3077], CA [28:7554062022059137936:3116], CA [28:7554062022059137930:3111], CA [28:7554062022059137893:3080], CA [28:7554062022059137899:3085], CA [28:7554062022059137934:3114], CA [28:7554062022059137902:3088], CA [28:7554062022059137928:3109], CA [28:7554062022059137940:3119], CA [28:7554062022059137891:3078], CA [28:7554062022059137943:3122], CA [28:7554062022059137932:3112], CA [28:7554062022059137906:3091], CA [28:7554062022059137947:3125], CA [28:7554062022059137950:3128], CA [28:7554062022059137913:3097], CA [28:7554062022059137954:3131], CA [28:7554062022059137948:3126], CA [28:7554062022059137917:3100], CA [28:7554062022059137911:3095], CA [28:7554062022059137957:3134], CA [28:7554062022059137952:3129], CA [28:7554062022059137915:3098], CA [28:7554062022059137927:3108], CA [28:7554062022059137921:3103], CA [28:7554062022059137925:3106], CA [28:7554062022059137888:3075], CA [28:7554062022059137882:3070], CA [28:7554062022059137896:3083], CA [28:7554062022059137886:3073], 2025-09-25T16:18:42.606083Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:470: ActorId: [28:7554062022059137877:3062] TxId: 281474976715670. Ctx: { TraceId: 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7554062022059137907:3092], task: 23, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 74 Tasks { TaskId: 23 CpuTimeUs: 84 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 26 BuildCpuTimeUs: 58 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-v6cxduzo2m" NodeId: 28 CreateTimeMs: 1758817122595 CurrentWaitInputTimeUs: 7 UpdateTimeMs: 1758817122601 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:42.606112Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:698: ActorId: [28:7554062022059137877:3062] TxId: 281474976715670. 
Ctx: { TraceId: 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. Waiting for: CA [28:7554062022059137937:3117], CA [28:7554062022059137900:3086], CA [28:7554062022059137894:3081], CA [28:7554062022059137889:3076], CA [28:7554062022059137929:3110], CA [28:7554062022059137941:3120], CA [28:7554062022059137935:3115], CA [28:7554062022059137898:3084], CA [28:7554062022059137892:3079], CA [28:7554062022059137904:3089], CA [28:7554062022059137944:3123], CA [28:7554062022059137939:3118], CA [28:7554062022059137907:3092], CA [28:7554062022059137933:3113], CA [28:7554062022059137901:3087], CA [28:7554062022059137942:3121], CA [28:7554062022059137905:3090], CA [28:7554062022059137946:3124], CA [28:7554062022059137908:3093], CA [28:7554062022059137955:3132], CA [28:7554062022059137949:3127], CA [28:7554062022059137918:3101], CA [28:7554062022059137912:3096], CA [28:7554062022059137958:3135], CA [28:7554062022059137953:3130], CA [28:7554062022059137910:3094], CA [28:7554062022059137922:3104], CA [28:7554062022059137916:3099], CA [28:7554062022059137956:3133], CA [28:7554062022059137920:3102], CA [28:7554062022059137926:3107], CA [28:7554062022059137883:3071], CA [28:7554062022059137923:3105], CA [28:7554062022059137887:3074], CA [28:7554062022059137895:3082], CA [28:7554062022059137885:3072], CA [28:7554062022059137890:3077], CA [28:7554062022059137936:3116], CA [28:7554062022059137930:3111], CA [28:7554062022059137893:3080], CA [28:7554062022059137899:3085], CA [28:7554062022059137934:3114], CA [28:7554062022059137902:3088], CA [28:7554062022059137928:3109], CA [28:7554062022059137940:3119], CA [28:7554062022059137891:3078], CA [28:7554062022059137943:3122], CA [28:7554062022059137932:3112], CA [28:7554062022059137906:3091], CA [28:7554062022059137947:3125], CA [28:7554062022059137950:3128], CA [28:7554062022059137913:3097], CA [28:7554062022059137954:3131], CA [28:7554062022059137948:3126], CA [28:7554062022059137917:3100], CA [28:7554062022059137911:3095], CA [28:7554062022059137957:3134], CA [28:7554062022059137952:3129], CA [28:7554062022059137915:3098], CA [28:7554062022059137927:3108], CA [28:7554062022059137921:3103], CA [28:7554062022059137925:3106], CA [28:7554062022059137888:3075], CA [28:7554062022059137882:3070], CA [28:7554062022059137896:3083], CA [28:7554062022059137886:3073], 2025-09-25T16:18:42.606130Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:470: ActorId: [28:7554062022059137877:3062] TxId: 281474976715670. Ctx: { TraceId: 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7554062022059137908:3093], task: 24, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 97 Tasks { TaskId: 24 CpuTimeUs: 123 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 42 BuildCpuTimeUs: 81 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-v6cxduzo2m" NodeId: 28 CreateTimeMs: 1758817122595 CurrentWaitInputTimeUs: 6 UpdateTimeMs: 1758817122601 } MaxMemoryUsage: 1048576 } 2025-09-25T16:18:42.606157Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:698: ActorId: [28:7554062022059137877:3062] TxId: 281474976715670. Ctx: { TraceId: 01k60tpy5f8v0r9agh6hsmjgq0, Database: /Root, SessionId: ydb://session/3?node_id=28&id=Y2FkMWY5OGMtYzVkMjJiODgtNGQyOWQ4NC02NGRmYjJk, PoolId: default}. 
Waiting for: CA [28:7554062022059137937:3117], CA [28:7554062022059137900:3086], CA [28:7554062022059137894:3081], CA [28:7554062022059137889:3076], CA [28:7554062022059137929:3110], CA [28:7554062022059137941:3120], CA [28:7554062022059137935:3115], CA [28:7554062022059137898:3084], CA [28:7554062022059137892:3079], CA [28:7554062022059137904:3089], CA [28:7554062022059137944:3123], CA [28:7554062022059137939:3118], CA [28:7554062022059137907:3092], CA [28:7554062022059137933:3113], CA [28:7554062022059137901:3087], CA [28:7554062022059137942:3121], CA [28:7554062022059137905:3090], CA [28:7554062022059137946:3124], CA [28:7554062022059137908:3093], CA [28:7554062022059137955:3132], CA [28:7554062022059137949:3127], CA [28:7554062022059137918:3101], CA [28:7554062022059137912:3096], CA [28:7554062022059137958:3135], CA [28:7554062022059137953:3130], CA [28:7554062022059137910:3094], CA [28:7554062022059137922:3104], CA [28:7554062022059137916:3099], CA [28:7554062022059137956:3133], CA [28:7554062022059137920:3102], CA [28:7554062022059137926:3107], CA [28:7554062022059137883:3071], CA [28:7554062022059137923:3105], CA [28:7554062022059137887:3074], CA [28:7554062022059137895:3082], CA [28:7554062022059137885:3072], CA [28:7554062022059137890:3077], CA [28:7554062022059137936:3116], CA [28:7554062022059137930:3111], CA [28:7554062022059137893:3080], CA [28:7554062022059137899:3085], CA [28:7554062022059137934:3114], CA [28:7554062022059137902:3088], CA [28:7554062022059137928:3109], CA [28:7554062022059137940:3119], CA [28:7554062022059137891:3078], CA [28:7554062022059137943:3122], CA [28:7554062022059137932:3112], CA [28:7554062022059137906:3091], CA [28:7554062022059137947:3125], CA [28:7554062022059137950:3128], CA [28:7554062022059137913:3097], CA [28:7554062022059137954:3131], CA [28:7554062022059137948:3126], CA [28:7554062022059137917:3100], CA [28:7554062022059137911:3095], CA [28:7554062022059137957:3134], CA [28:7554062022059137952:3129], CA [28:7554062022059137915:3098], CA [28:7554062022059137927:3108], CA [28:7554062022059137921:3103], CA [28:7554062022059137925:3106], CA [28:7554062022059137888:3075], CA [28:7554062022059137882:3070], CA [28:7554062022059137896:3083], CA [28:7554062022059137886:3073], >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-09-25T16:18:43.162441Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:43.162576Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162588Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:43.162598Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:43.162604Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:43.162617Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 
from 0.000000 to 800.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:43.162629Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162633Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162639Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:43.162661Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162666Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:43.162671Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:43.162675Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.162688Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162692Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162696Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:43.162700Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.162706Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162711Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:43.162715Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:43.162718Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.162725Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [1:104:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:43.162730Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162739Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:104:2138]) 2025-09-25T16:18:43.162744Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.162760Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {400, 400}) 2025-09-25T16:18:43.162770Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 800.000000 2025-09-25T16:18:43.162776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [1:104:2138]) from queue queue_compaction1 
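From this point the TestRandomQueue output switches to node 2, where tasks arrive with randomized priorities, resource pairs and occasionally the deliberately unregistered type 'wrong'. A generic property-style harness for exercising a scheduler that way — a sketch against a trivial stand-in scheduler, not the YDB unit test itself — submits and finishes tasks in random order and asserts after every step that the configured limits are never exceeded:

// Property-style random test sketch in the spirit of TestRandomQueue: throw
// randomized tasks at a scheduler and assert resource limits are never
// exceeded. TTinyScheduler is a deliberately trivial stand-in, not the YDB
// resource broker.
#include <cassert>
#include <cstdint>
#include <deque>
#include <random>

struct TTask { uint64_t Cpu, Memory; };

class TTinyScheduler {
public:
    TTinyScheduler(uint64_t cpu, uint64_t mem) : CpuLimit(cpu), MemLimit(mem) {}

    void Submit(TTask t) { Waiting.push_back(t); Pump(); }

    bool FinishOne() {                 // finish the oldest running task, if any
        if (Running.empty()) return false;
        TTask t = Running.front();
        Running.pop_front();
        CpuUsed -= t.Cpu; MemUsed -= t.Memory;
        Pump();
        return true;
    }

    uint64_t CpuUsed = 0, MemUsed = 0;

private:
    void Pump() {                      // start waiting tasks while they fit
        while (!Waiting.empty() &&
               CpuUsed + Waiting.front().Cpu <= CpuLimit &&
               MemUsed + Waiting.front().Memory <= MemLimit) {
            Running.push_back(Waiting.front());
            CpuUsed += Waiting.front().Cpu;
            MemUsed += Waiting.front().Memory;
            Waiting.pop_front();
        }
    }

    uint64_t CpuLimit, MemLimit;
    std::deque<TTask> Waiting, Running;
};

int main() {
    std::mt19937 rng(42);                       // fixed seed: reproducible runs
    std::uniform_int_distribution<uint64_t> res(1, 500);
    std::bernoulli_distribution finish(0.5);

    TTinyScheduler sched(/*cpu=*/1000, /*mem=*/1000);
    for (int i = 0; i < 1000; ++i) {
        if (finish(rng)) sched.FinishOne();
        else sched.Submit({res(rng), res(rng)});
        // The invariant a randomized scheduler test is really about:
        assert(sched.CpuUsed <= 1000 && sched.MemUsed <= 1000);
    }
    return 0;
}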
2025-09-25T16:18:43.162780Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162785Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:43.162791Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:104:2138]) 2025-09-25T16:18:43.162795Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:43.162802Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:104:2138]) (release resources {400, 400}) 2025-09-25T16:18:43.162809Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 800.000000 to 280.000000 (remove task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:43.162813Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 280.000000 2025-09-25T16:18:43.162818Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-4 (4 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:43.162822Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162827Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 280.000000 to 1054.000000 (insert task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:43.162831Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:43.162835Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:43.162841Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:104:2138]) (release resources {400, 400}) 2025-09-25T16:18:43.162846Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1054.000000 to 560.000000 (remove task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:43.162850Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 280.000000 to 560.000000 2025-09-25T16:18:43.162854Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-6 (6 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:43.162858Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:43.162876Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 560.000000 to 1308.000000 (insert task task-6 (6 by [1:104:2138])) 2025-09-25T16:18:43.162880Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:104:2138]) 2025-09-25T16:18:43.431524Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:43.431621Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-1 (1 by [2:105:2138]) priority=4 
resources={384, 454} 2025-09-25T16:18:43.431630Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.431636Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {384, 454} for task task-1 (1 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:43.431640Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.431648Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 908.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:43.431656Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-2 (2 by [2:105:2138]) priority=1 resources={320, 352} 2025-09-25T16:18:43.431660Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-2 (2 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:43.431664Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.431669Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-3 (3 by [2:105:2138]) priority=1 resources={106, 250} 2025-09-25T16:18:43.431672Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_default 2025-09-25T16:18:43.431674Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.431679Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2138]) priority=2 resources={1, 209} 2025-09-25T16:18:43.431681Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:43.431684Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.431687Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:43.431690Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-5 (5 by [2:105:2138]) priority=4 resources={459, 179} 2025-09-25T16:18:43.431693Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-5 (5 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:43.431696Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.431698Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:43.431702Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-6 (6 by [2:105:2138]) priority=3 resources={351, 281} 2025-09-25T16:18:43.431705Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-6 (6 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:43.431707Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:43.431709Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue 
queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:43.431714Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [2:105:2138]) priority=2 resources={57, 215} 2025-09-25T16:18:43.431717Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:43.431722Z node 2 :RESOURCE_BROKER DEBUG: reso ... urce usage for queue queue_default from 687997.686400 to 687457.832800 (remove task task-908 (908 by [2:105:2138])) 2025-09-25T16:18:44.013302Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 686961.732800 to 687457.832800 2025-09-25T16:18:44.013307Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {414, 305} for task task-912 (912 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013311Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-912 (912 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013317Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 687457.832800 to 689165.003200 (insert task task-912 (912 by [2:105:2138])) 2025-09-25T16:18:44.013322Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-920 (920 by [2:105:2138]) 2025-09-25T16:18:44.013328Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-912 (912 by [2:105:2138]) (release resources {414, 305}) 2025-09-25T16:18:44.013334Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 689165.003200 to 689090.152000 (remove task task-912 (912 by [2:105:2138])) 2025-09-25T16:18:44.013340Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 687457.832800 to 689090.152000 2025-09-25T16:18:44.013344Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {369, 157} for task task-920 (920 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013349Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-920 (920 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013356Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 689090.152000 to 690494.123200 (insert task task-920 (920 by [2:105:2138])) 2025-09-25T16:18:44.013361Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-938 (938 by [2:105:2138]) 2025-09-25T16:18:44.013367Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-920 (920 by [2:105:2138]) (release resources {369, 157}) 2025-09-25T16:18:44.013373Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 689090.152000 to 690807.035200 2025-09-25T16:18:44.013378Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {361, 262} for task task-938 (938 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013383Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-938 (938 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013389Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for 
queue queue_default from 690494.123200 to 692238.761200 (insert task task-938 (938 by [2:105:2138])) 2025-09-25T16:18:44.013393Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-950 (950 by [2:105:2138]) 2025-09-25T16:18:44.013403Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-938 (938 by [2:105:2138]) (release resources {361, 262}) 2025-09-25T16:18:44.013409Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 692238.761200 to 691968.588800 (remove task task-938 (938 by [2:105:2138])) 2025-09-25T16:18:44.013415Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 690807.035200 to 691968.588800 2025-09-25T16:18:44.013420Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {62, 250} for task task-950 (950 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013424Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-950 (950 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013430Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 691968.588800 to 692903.588800 (insert task task-950 (950 by [2:105:2138])) 2025-09-25T16:18:44.013435Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-958 (958 by [2:105:2138]) 2025-09-25T16:18:44.013441Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-950 (950 by [2:105:2138]) (release resources {62, 250}) 2025-09-25T16:18:44.013447Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 692903.588800 to 692319.088800 (remove task task-950 (950 by [2:105:2138])) 2025-09-25T16:18:44.013453Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 691968.588800 to 692319.088800 2025-09-25T16:18:44.013457Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {369, 460} for task task-958 (958 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013462Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-958 (958 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013468Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 692319.088800 to 693994.040800 (insert task task-958 (958 by [2:105:2138])) 2025-09-25T16:18:44.013473Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-963 (963 by [2:105:2138]) 2025-09-25T16:18:44.013481Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-958 (958 by [2:105:2138]) (release resources {369, 460}) 2025-09-25T16:18:44.013486Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 693994.040800 to 692383.672800 (remove task task-958 (958 by [2:105:2138])) 2025-09-25T16:18:44.013492Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 692319.088800 to 692383.672800 2025-09-25T16:18:44.013496Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {75, 268} for task task-963 (963 by [2:105:2138]) from queue queue_default 
2025-09-25T16:18:44.013500Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-963 (963 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013505Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 692383.672800 to 693338.610400 (insert task task-963 (963 by [2:105:2138])) 2025-09-25T16:18:44.013508Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-972 (972 by [2:105:2138]) 2025-09-25T16:18:44.013514Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-963 (963 by [2:105:2138]) (release resources {75, 268}) 2025-09-25T16:18:44.013520Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 693338.610400 to 692727.784800 (remove task task-963 (963 by [2:105:2138])) 2025-09-25T16:18:44.013525Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 692383.672800 to 692727.784800 2025-09-25T16:18:44.013530Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {256, 320} for task task-972 (972 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013534Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-972 (972 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013540Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 692727.784800 to 693846.504800 (insert task task-972 (972 by [2:105:2138])) 2025-09-25T16:18:44.013545Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_default due to exceeded limits 2025-09-25T16:18:44.013551Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-972 (972 by [2:105:2138]) (release resources {256, 320}) 2025-09-25T16:18:44.013557Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 692727.784800 to 694332.776800 2025-09-25T16:18:44.013562Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {199, 16} for task task-977 (977 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013567Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-977 (977 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013572Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 693846.504800 to 695034.371200 (insert task task-977 (977 by [2:105:2138])) 2025-09-25T16:18:44.013577Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_default due to exceeded limits 2025-09-25T16:18:44.013584Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-977 (977 by [2:105:2138]) (release resources {199, 16}) 2025-09-25T16:18:44.013590Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 695034.371200 to 694729.184800 (remove task task-977 (977 by [2:105:2138])) 2025-09-25T16:18:44.013597Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 694332.776800 to 694729.184800 2025-09-25T16:18:44.013602Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {219, 4} for task task-986 (986 by [2:105:2138]) 
from queue queue_default 2025-09-25T16:18:44.013606Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-986 (986 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013612Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 694729.184800 to 695480.880400 (insert task task-986 (986 by [2:105:2138])) 2025-09-25T16:18:44.013617Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-992 (992 by [2:105:2138]) 2025-09-25T16:18:44.013623Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-986 (986 by [2:105:2138]) (release resources {219, 4}) 2025-09-25T16:18:44.013628Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 694729.184800 to 696349.434400 2025-09-25T16:18:44.013632Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {414, 85} for task task-992 (992 by [2:105:2138]) from queue queue_default 2025-09-25T16:18:44.013636Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-992 (992 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-09-25T16:18:44.013642Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 695480.880400 to 697895.807200 (insert task task-992 (992 by [2:105:2138])) 2025-09-25T16:18:44.013649Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-992 (992 by [2:105:2138]) (release resources {414, 85}) 2025-09-25T16:18:44.013655Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 696349.434400 to 698558.372800 |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> TabletState::ImplicitUnsubscribeOnDisconnect >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset >> TabletState::NormalLifecycle >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletPipeTest::TestTwoNodes |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-09-25T16:17:13.859808Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061639427724387:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:17:13.859827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005165/r3tmp/tmpMUzlWy/pdisk_1.dat 2025-09-25T16:17:13.874505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2025-09-25T16:17:13.919408Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17789, node 1 2025-09-25T16:17:13.930900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:17:13.930912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:17:13.930914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:17:13.930953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9039 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:17:13.961584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:17:13.961616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:17:13.963165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:17:13.973183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:17:13.990321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) TClient is connected to server localhost:9039 2025-09-25T16:17:14.349732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692617:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.349781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.349922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692628:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.349934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.353689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:17:14.393814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692805:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692830:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692834:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692835:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692836:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.393950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692840:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.394036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692838:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.394040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692841:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.394055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692844:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.394072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.395531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:17:14.395891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692917:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.395896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692920:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.395911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.395919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692921:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.396250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692938:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.396249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061643722692932:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.396265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:17:14.401842Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061643722692857:2748] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 7205759404664448 ... 7. Ctx: { TraceId: 01k60tpw490pnph2wpapbfeg3n, Database: , SessionId: ydb://session/3?node_id=1&id=ZWQxMGUyOTEtYThmNjNlMGMtODU4NmVjYmQtNzZmYzMzNDE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.394645Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896308. Ctx: { TraceId: 01k60tpw491s50pw08rr9wpvtv, Database: , SessionId: ydb://session/3?node_id=1&id=OWRlZTljMDMtYWVmOTYxZTgtMWQwMzk4NjUtNWRjYWE2OTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.395112Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896309. Ctx: { TraceId: 01k60tpw49dcmrk45ez0zhcrcf, Database: , SessionId: ydb://session/3?node_id=1&id=ZTkzYzYxNzAtZjdmODAzOGQtNzIzOTFhNGYtODFkNmRhNTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.395543Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896310. Ctx: { TraceId: 01k60tpw4aabg4fdkdnnvrn1th, Database: , SessionId: ydb://session/3?node_id=1&id=ODZjMWI3YWUtZDZjMTk1ZDktZTFiNWY0ZGEtOWNhNDQzZTg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.395649Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896311. Ctx: { TraceId: 01k60tpw4ackph6dfta25c8xtj, Database: , SessionId: ydb://session/3?node_id=1&id=OWJhMzljZTEtMjZjM2Q2NGEtOGNhNjJiOWMtNTg3N2I0N2I=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.395683Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896312. Ctx: { TraceId: 01k60tpw4a4yjb9sryzmc47s7m, Database: , SessionId: ydb://session/3?node_id=1&id=MjgyNzA4MWEtOTg4NGVkNTItODMyNWI3OGItOTRkNjk0ZDY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.396568Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896313. Ctx: { TraceId: 01k60tpw4bf41fw1x25h051w0t, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU1M2EwYzUtNDdmMWQxYTgtYWFmMWY4YWItNjNjMWRlZDA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.396719Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896316. Ctx: { TraceId: 01k60tpw4b03kkp8g0cxq2bh1d, Database: , SessionId: ydb://session/3?node_id=1&id=ZWQxMGUyOTEtYThmNjNlMGMtODU4NmVjYmQtNzZmYzMzNDE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.396719Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896314. Ctx: { TraceId: 01k60tpw4bbw4j8d5apyrg1z9s, Database: , SessionId: ydb://session/3?node_id=1&id=NWM5Y2QwYTQtZmE3ZWM2MGItNDNkN2FiNjktYTMyYzg0Nzc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.396838Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896315. Ctx: { TraceId: 01k60tpw4b8750evrbgdry4n54, Database: , SessionId: ydb://session/3?node_id=1&id=OWY5MWYzMWYtZDJiYjE1MGItMjU5NmNhODAtZjQ4ZmM3Yjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.397134Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896317. 
Ctx: { TraceId: 01k60tpw4b9abrs6dhjrgyvjc4, Database: , SessionId: ydb://session/3?node_id=1&id=OWRlZTljMDMtYWVmOTYxZTgtMWQwMzk4NjUtNWRjYWE2OTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.397623Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896320. Ctx: { TraceId: 01k60tpw4c6kty29jpf1cfs6kv, Database: , SessionId: ydb://session/3?node_id=1&id=MjgyNzA4MWEtOTg4NGVkNTItODMyNWI3OGItOTRkNjk0ZDY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.397817Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896318. Ctx: { TraceId: 01k60tpw4b4c62pd1n8exfw9nb, Database: , SessionId: ydb://session/3?node_id=1&id=NTMwNjI2YTUtYTU2ZjU1M2QtOTJmODg0ZS1iNmYzMjgx, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.397923Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896319. Ctx: { TraceId: 01k60tpw4c5bdkcpz8z6qtd11f, Database: , SessionId: ydb://session/3?node_id=1&id=ZTkzYzYxNzAtZjdmODAzOGQtNzIzOTFhNGYtODFkNmRhNTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.398086Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896322. Ctx: { TraceId: 01k60tpw4c02g48k1210dy8r8d, Database: , SessionId: ydb://session/3?node_id=1&id=ODZjMWI3YWUtZDZjMTk1ZDktZTFiNWY0ZGEtOWNhNDQzZTg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.398196Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896321. Ctx: { TraceId: 01k60tpw4c4a4z4mqsf5xc0mt0, Database: , SessionId: ydb://session/3?node_id=1&id=OWJhMzljZTEtMjZjM2Q2NGEtOGNhNjJiOWMtNTg3N2I0N2I=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.398856Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896323. Ctx: { TraceId: 01k60tpw4d69tvz0pt7vpwqbhg, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU1M2EwYzUtNDdmMWQxYTgtYWFmMWY4YWItNjNjMWRlZDA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.398923Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896324. Ctx: { TraceId: 01k60tpw4d7wj7y6ncw8mv6avm, Database: , SessionId: ydb://session/3?node_id=1&id=NWM5Y2QwYTQtZmE3ZWM2MGItNDNkN2FiNjktYTMyYzg0Nzc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.399685Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896325. Ctx: { TraceId: 01k60tpw4f4e5y9b9gxbfr5b8a, Database: , SessionId: ydb://session/3?node_id=1&id=OWRlZTljMDMtYWVmOTYxZTgtMWQwMzk4NjUtNWRjYWE2OTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.399844Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896326. Ctx: { TraceId: 01k60tpw4f5ysss2xmgh9gv8m5, Database: , SessionId: ydb://session/3?node_id=1&id=NTMwNjI2YTUtYTU2ZjU1M2QtOTJmODg0ZS1iNmYzMjgx, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.399874Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896329. Ctx: { TraceId: 01k60tpw4ff70t5jktp9kd2s7d, Database: , SessionId: ydb://session/3?node_id=1&id=MjgyNzA4MWEtOTg4NGVkNTItODMyNWI3OGItOTRkNjk0ZDY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.400083Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896327. 
Ctx: { TraceId: 01k60tpw4eatk0tccs274c3grk, Database: , SessionId: ydb://session/3?node_id=1&id=OWY5MWYzMWYtZDJiYjE1MGItMjU5NmNhODAtZjQ4ZmM3Yjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-09-25T16:18:40.400291Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896328. Ctx: { TraceId: 01k60tpw4e6327mbhswcdsm0p8, Database: , SessionId: ydb://session/3?node_id=1&id=ZWQxMGUyOTEtYThmNjNlMGMtODU4NmVjYmQtNzZmYzMzNDE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.400398Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896330. Ctx: { TraceId: 01k60tpw4f4zne4rm661qbkzzg, Database: , SessionId: ydb://session/3?node_id=1&id=ZTkzYzYxNzAtZjdmODAzOGQtNzIzOTFhNGYtODFkNmRhNTA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.400514Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896331. Ctx: { TraceId: 01k60tpw4fc6f3q33ae6zfqd9m, Database: , SessionId: ydb://session/3?node_id=1&id=ODZjMWI3YWUtZDZjMTk1ZDktZTFiNWY0ZGEtOWNhNDQzZTg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.400816Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896332. Ctx: { TraceId: 01k60tpw4fe0ph2zw2xv23epq7, Database: , SessionId: ydb://session/3?node_id=1&id=OWJhMzljZTEtMjZjM2Q2NGEtOGNhNjJiOWMtNTg3N2I0N2I=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls response: 2025-09-25T16:18:40.401061Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896333. Ctx: { TraceId: 01k60tpw4fenafxsmkqbnb1y2b, Database: , SessionId: ydb://session/3?node_id=1&id=ZTU1M2EwYzUtNDdmMWQxYTgtYWFmMWY4YWItNjNjMWRlZDA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817034422 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-09-25T16:18:40.401645Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896334. Ctx: { TraceId: 01k60tpw4f47x55cy5z4vn064f, Database: , SessionId: ydb://session/3?node_id=1&id=NWM5Y2QwYTQtZmE3ZWM2MGItNDNkN2FiNjktYTMyYzg0Nzc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.401826Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896335. Ctx: { TraceId: 01k60tpw4ge5d6g1jvapqxdzbc, Database: , SessionId: ydb://session/3?node_id=1&id=NTMwNjI2YTUtYTU2ZjU1M2QtOTJmODg0ZS1iNmYzMjgx, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.402282Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896337. Ctx: { TraceId: 01k60tpw4h39fm9a5w1j3nwc4x, Database: , SessionId: ydb://session/3?node_id=1&id=MjgyNzA4MWEtOTg4NGVkNTItODMyNWI3OGItOTRkNjk0ZDY=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-09-25T16:18:40.402375Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896336. Ctx: { TraceId: 01k60tpw4hchpxpsz9cz78z5d9, Database: , SessionId: ydb://session/3?node_id=1&id=OWRlZTljMDMtYWVmOTYxZTgtMWQwMzk4NjUtNWRjYWE2OTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:40.403191Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976896338. Ctx: { TraceId: 01k60tpw4h82qxq9329hw988ty, Database: , SessionId: ydb://session/3?node_id=1&id=OWY5MWYzMWYtZDJiYjE1MGItMjU5NmNhODAtZjQ4ZmM3Yjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1758817034422 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 4 shards |81.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> ReadLoad::ShouldReadIterate >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TReplicationTests::CreateInParallel [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TReplicationTests::CreateDropRecreate >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> TTabletPipeTest::TestConnectReject >> ReadLoad::ShouldReadKqp >> TabletState::NormalLifecycle [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-09-25T16:18:10.673372Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:10.678506Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { 
GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:10.678609Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:10.678893Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:10.678967Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:10.679140Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:73:2076] ControllerId# 72057594037932033 2025-09-25T16:18:10.679146Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:10.679175Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:10.679209Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:10.682691Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:10.682709Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:10.683066Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683112Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683142Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683168Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683192Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:84:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683219Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:85:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683246Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:86:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.683252Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:10.683269Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:73:2076] 2025-09-25T16:18:10.683275Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:73:2076] 2025-09-25T16:18:10.683285Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:10.683294Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:10.683469Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:10.683492Z node 3 :BS_NODE DEBUG: 
{NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:10.684084Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:10.684138Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:10.684257Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:10.684311Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:10.684494Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:96:2077] ControllerId# 72057594037932033 2025-09-25T16:18:10.684498Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:10.684514Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:10.684538Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:10.685957Z node 3 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:10.687662Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:10.687679Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:10.688190Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:103:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688238Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:104:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688281Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:105:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688339Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:106:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688370Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:107:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688399Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:108:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688428Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:109:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.688435Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:10.688452Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:96:2077] 
2025-09-25T16:18:10.688458Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:96:2077] 2025-09-25T16:18:10.688469Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:10.688478Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:10.688607Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:10.688635Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:10.690361Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:10.690415Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:10.690535Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:10.690607Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:10.690893Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:10.690907Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:10.691108Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:120:2078] ControllerId# 72057594037932033 2025-09-25T16:18:10.691114Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:10.691132Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:10.691153Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:10.692564Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:10.694684Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:10.694700Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:10.695039Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:128:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695071Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:129:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695099Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# 
[1:130:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695137Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:131:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695166Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:132:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695194Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:133:2088] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695223Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:134:2089] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.695229Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:10.695245Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_clie ... 66 :HIVE DEBUG: hive_impl.cpp:247: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2025-09-25T16:18:43.639832Z node 66 :HIVE DEBUG: hive_impl.cpp:327: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-09-25T16:18:43.639838Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-09-25T16:18:43.639844Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:43.639849Z node 66 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2025-09-25T16:18:43.639933Z node 66 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [66:464:2293] 2025-09-25T16:18:43.639940Z node 66 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [66:464:2293] 2025-09-25T16:18:43.639956Z node 66 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [66:384:2237] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:43.639965Z node 66 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 66 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:43.639999Z node 66 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:43.640023Z node 66 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-09-25T16:18:43.640034Z node 66 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-09-25T16:18:43.640040Z node 66 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-09-25T16:18:43.640051Z node 66 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [66:384:2237] CurrentLeaderTablet: [66:400:2248] CurrentGeneration: 1 CurrentStep: 0} 2025-09-25T16:18:43.640068Z node 66 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 
72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [66:384:2237] CurrentLeaderTablet: [66:400:2248] CurrentGeneration: 1 CurrentStep: 0} 2025-09-25T16:18:43.640083Z node 66 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [66:384:2237] followers: 0 2025-09-25T16:18:43.640122Z node 66 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 66 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:43.640131Z node 66 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [66:464:2293] 2025-09-25T16:18:43.640135Z node 66 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [66:464:2293] 2025-09-25T16:18:43.640208Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [67:466:2164] 2025-09-25T16:18:43.640217Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [67:466:2164] 2025-09-25T16:18:43.640229Z node 67 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [66:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:43.640238Z node 67 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 67 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [66:333:2202] 2025-09-25T16:18:43.640249Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [67:466:2164] 2025-09-25T16:18:43.640257Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [67:466:2164] 2025-09-25T16:18:43.640265Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 66 [67:466:2164] 2025-09-25T16:18:43.640285Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [67:466:2164] 2025-09-25T16:18:43.640291Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [67:466:2164] 2025-09-25T16:18:43.640346Z node 66 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [67:466:2164] 2025-09-25T16:18:43.640385Z node 66 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([67:466:2164]) [66:467:2294] 2025-09-25T16:18:43.640411Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [67:466:2164] 2025-09-25T16:18:43.640416Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [67:466:2164] 2025-09-25T16:18:43.640420Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [67:466:2164] 2025-09-25T16:18:43.640430Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [67:466:2164] 2025-09-25T16:18:43.640436Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [67:466:2164] 2025-09-25T16:18:43.640440Z node 67 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [67:466:2164] 
2025-09-25T16:18:43.640474Z node 66 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [67:453:2159] EventType# 268697624 2025-09-25T16:18:43.640500Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-09-25T16:18:43.640507Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:18:43.640518Z node 66 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037927937 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-09-25T16:18:43.640566Z node 66 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037927937 THive::TTxStartTablet::Execute, Sending TEvBootTablet(Dummy.72075186224037888.Leader.2) to node 67 storage {Version# 1 TabletID# 72075186224037888 TabletType# Dummy Channels# {0:{Channel# 0 Type# none StoragePool# def1 History# {0:{FromGeneration# 0 GroupID# 2147483648 Timestamp# 1970-01-01T00:00:00.056536Z}}, 1:{Channel# 1 Type# none StoragePool# def2 History# {0:{FromGeneration# 0 GroupID# 2147483649 Timestamp# 1970-01-01T00:00:00.056536Z}}, 2:{Channel# 2 Type# none StoragePool# def3 History# {0:{FromGeneration# 0 GroupID# 2147483650 Timestamp# 1970-01-01T00:00:00.056536Z}}} Tenant: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:43.640584Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2025-09-25T16:18:43.640591Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:43.661258Z node 66 :BS_PROXY_PUT INFO: dsproxy_put.cpp:650: [94999ccdc54a9387] bootstrap ActorId# [66:469:2296] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:127:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-09-25T16:18:43.661317Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [94999ccdc54a9387] Id# [72057594037927937:2:9:0:0:127:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-09-25T16:18:43.661327Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [94999ccdc54a9387] restore Id# [72057594037927937:2:9:0:0:127:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:18:43.661339Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [94999ccdc54a9387] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG33 2025-09-25T16:18:43.661345Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [94999ccdc54a9387] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG32 2025-09-25T16:18:43.661383Z node 66 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [66:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:127:1] FDS# 127 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:18:43.661865Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [94999ccdc54a9387] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:127:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 81000 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 
MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:18:43.661909Z node 66 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [94999ccdc54a9387] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:18:43.661921Z node 66 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [94999ccdc54a9387] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:43.661947Z node 66 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.179 sample PartId# [72057594037927937:2:9:0:0:127:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 66 } TEvVPutResult{ TimestampMs# 0.674 VDiskId# [0:1:0:0:0] NodeId# 66 Status# OK } ] } 2025-09-25T16:18:43.661982Z node 66 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:43.662013Z node 66 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2025-09-25T16:18:43.662078Z node 66 :HIVE DEBUG: tx__start_tablet.cpp:122: HIVE#72057594037927937 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [67:453:2159] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483648 } StoragePool: "def1" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483649 } StoragePool: "def2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2147483650 } StoragePool: "def3" } TabletType: Dummy Version: 1 TenantIdOwner: 72057594046678944 TenantIdLocalId: 1 } SuggestedGeneration: 2 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-09-25T16:18:43.662144Z node 66 :HIVE TRACE: hive_impl.cpp:815: HIVE#72057594037927937 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 67 Cookie 0 |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |81.6%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |81.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] Test command err: 2025-09-25T16:18:44.319842Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:44.319956Z node 1 
:RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=5 resources={200, 200} 2025-09-25T16:18:44.319965Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.319973Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:44.319980Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.319993Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:44.320003Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [1:104:2138]) priority=5 resources={100, 100} 2025-09-25T16:18:44.320008Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320012Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-2 (2 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.320017Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320022Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:44.320029Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:104:2138]) priority=5 resources={100, 100} 2025-09-25T16:18:44.320033Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320037Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-3 (3 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.320041Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320046Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 400.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:44.320052Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [1:104:2138]) priority=5 resources={100, 100} 2025-09-25T16:18:44.320056Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320060Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-4 (4 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.320064Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320069Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 600.000000 (insert task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:44.320075Z node 1 
:RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:104:2138]) priority=5 resources={250, 250} 2025-09-25T16:18:44.320079Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.320083Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:104:2138]) 2025-09-25T16:18:44.320091Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [1:104:2138]) priority=5 resources={250, 250} 2025-09-25T16:18:44.320095Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320099Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:104:2138]) 2025-09-25T16:18:44.320103Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:44.320109Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-7 (7 by [1:104:2138]) priority=5 resources={150, 150} 2025-09-25T16:18:44.320113Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320119Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:104:2138]) 2025-09-25T16:18:44.320123Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:44.320160Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {200, 200}) 2025-09-25T16:18:44.320166Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 0.000000 (remove task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:44.320170Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:104:2138]) 2025-09-25T16:18:44.320174Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-09-25T16:18:44.320181Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:104:2138]) (release resources {100, 100}) 2025-09-25T16:18:44.320185Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 600.000000 to 400.000000 (remove task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:44.320190Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-5 (5 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:44.320194Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.320198Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 475.000000 (insert task task-5 (5 by [1:104:2138])) 2025-09-25T16:18:44.320202Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-6 (6 by [1:104:2138]) 2025-09-25T16:18:44.320225Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:104:2138]) (release resources 
{100, 100}) 2025-09-25T16:18:44.320230Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 200.000000 (remove task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:44.320234Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-6 (6 by [1:104:2138]) 2025-09-25T16:18:44.320239Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:104:2138]) (release resources {100, 100}) 2025-09-25T16:18:44.320243Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 0.000000 (remove task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:44.320247Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-6 (6 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.320252Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320257Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 425.000000 (insert task task-6 (6 by [1:104:2138])) 2025-09-25T16:18:44.320261Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-7 (7 by [1:104:2138]) 2025-09-25T16:18:44.320283Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-5 (5 by [1:104:2138]) (release resources {250, 250}) 2025-09-25T16:18:44.320288Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 475.000000 to 0.000000 (remove task task-5 (5 by [1:104:2138])) 2025-09-25T16:18:44.320292Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {150, 150} for task task-7 (7 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.320296Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.320300Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 425.000000 to 680.000000 (insert task task-7 (7 by [1:104:2138])) 2025-09-25T16:18:44.320306Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-6 (6 by [1:104:2138]) (release resources {250, 250}) 2025-09-25T16:18:44.320310Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 680.000000 to 255.000000 (remove task task-6 (6 by [1:104:2138])) 2025-09-25T16:18:44.320332Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-7 (7 by [1:104:2138]) (release resources {150, 150}) 2025-09-25T16:18:44.320337Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 255.000000 to 0.000000 (remove task task-7 (7 by [1:104:2138])) 2025-09-25T16:18:44.320344Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1000 (1000 by [1:104:2138]) priority=5 resources={500, 500} 2025-09-25T16:18:44.320348Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1000 (1000 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.320352Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for 
task task-1000 (1000 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:44.320356Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1000 (1000 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.320361Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 900.000000 (insert task task-1000 (1000 by [1:104:2138])) 2025-09-25T16:18:44.320368Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:104:2138]) priority=5 resources={1, 1} 2025-09-25T16:18:44.320372Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320377Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:104:2138]) 2025-09-25T16:18:44.320382Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-2 (2 by [1:104:2138]) priority=5 resources={1, 1} 2025-09-25T16:18:44.320386Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task- ... task-9 (9 by [1:104:2138]) priority=5 resources={1, 1} 2025-09-25T16:18:44.320484Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-9 (9 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320487Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:104:2138]) 2025-09-25T16:18:44.320495Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-10 (10 by [1:104:2138]) priority=5 resources={1, 1} 2025-09-25T16:18:44.320499Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-10 (10 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320503Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:104:2138]) 2025-09-25T16:18:44.320508Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1000 (1000 by [1:104:2138]) (release resources {500, 500}) 2025-09-25T16:18:44.320514Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1500.000000 2025-09-25T16:18:44.320519Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-1 (1 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320524Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320528Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 2.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:44.320532Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-2 (2 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320536Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320540Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 2.000000 to 4.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:44.320544Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-3 
(3 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320548Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320552Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 4.000000 to 6.000000 (insert task task-3 (3 by [1:104:2138])) 2025-09-25T16:18:44.320556Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-4 (4 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320559Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320564Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 6.000000 to 8.000000 (insert task task-4 (4 by [1:104:2138])) 2025-09-25T16:18:44.320568Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-5 (5 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320571Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320576Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 8.000000 to 10.000000 (insert task task-5 (5 by [1:104:2138])) 2025-09-25T16:18:44.320580Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-6 (6 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320584Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320589Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 10.000000 to 12.000000 (insert task task-6 (6 by [1:104:2138])) 2025-09-25T16:18:44.320593Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-7 (7 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320597Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320601Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 12.000000 to 14.000000 (insert task task-7 (7 by [1:104:2138])) 2025-09-25T16:18:44.320605Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-8 (8 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320610Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-8 (8 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320616Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 14.000000 to 16.000000 (insert task task-8 (8 by [1:104:2138])) 2025-09-25T16:18:44.320621Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-9 (9 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320625Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-9 (9 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320629Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 
16.000000 to 18.000000 (insert task task-9 (9 by [1:104:2138])) 2025-09-25T16:18:44.320634Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-10 (10 by [1:104:2138]) from queue queue_default 2025-09-25T16:18:44.320637Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-10 (10 by [1:104:2138]) to queue queue_default 2025-09-25T16:18:44.320642Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 18.000000 to 20.000000 (insert task task-10 (10 by [1:104:2138])) 2025-09-25T16:18:44.320655Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320659Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 0.000000 to 20.000000 2025-09-25T16:18:44.320665Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320670Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320675Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320680Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-5 (5 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320685Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-6 (6 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320690Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-7 (7 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320696Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-8 (8 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320701Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-9 (9 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.320706Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-10 (10 by [1:104:2138]) (release resources {1, 1}) 2025-09-25T16:18:44.582551Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:44.582655Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:44.582665Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:44.582674Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:44.582680Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:44.582693Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:44.582703Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [2:105:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:44.582707Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task 
task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:44.582713Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:44.582720Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={400, 400} 2025-09-25T16:18:44.582724Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:44.582729Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-09-25T16:18:44.582743Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-3 (3 by [2:105:2138]) (priority=5 type=compaction1 resources={400, 400} resubmit=0) 2025-09-25T16:18:44.582748Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:44.582753Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:105:2138]) 2025-09-25T16:18:44.582757Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-09-25T16:18:44.582765Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {400, 400}) 2025-09-25T16:18:44.582772Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 80.000000 (remove task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:44.582778Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 2025-09-25T16:18:44.582784Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-3 (3 by [2:105:2138]) from queue queue_compaction1 2025-09-25T16:18:44.582788Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-09-25T16:18:44.582793Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-3 (3 by [2:105:2138])) 2025-09-25T16:18:44.582798Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] Test command err: 2025-09-25T16:18:44.426002Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:44.426131Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:104:2138]) priority=0 resources={100, 200} 2025-09-25T16:18:44.426142Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.426151Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 200} for task task-1 (1 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:44.426157Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning 
in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.426169Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:44.426187Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:104:2138]) priority=0 resources={100, 100} 2025-09-25T16:18:44.426192Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.426196Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-2 (2 by [1:104:2138]) from queue queue_compaction0 2025-09-25T16:18:44.426201Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.426206Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 600.000000 (insert task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:44.426219Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-1 (1 by [1:104:2138]) (priority=0 type=compaction0 resources={200, 300} resubmit=0) 2025-09-25T16:18:44.426224Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:104:2138]) to queue queue_compaction0 2025-09-25T16:18:44.426228Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 200.000000 to 800.000000 (insert task task-1 (1 by [1:104:2138])) 2025-09-25T16:18:44.426233Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:104:2138]) (release resources {100, 100}) 2025-09-25T16:18:44.426239Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 600.000000 (remove task task-2 (2 by [1:104:2138])) 2025-09-25T16:18:44.426250Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:104:2138]) priority=0 resources={10, 20} 2025-09-25T16:18:44.426254Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.426259Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {10, 20} for task task-3 (3 by [1:104:2138]) from queue queue_compaction1 2025-09-25T16:18:44.426263Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:104:2138]) to queue queue_compaction1 2025-09-25T16:18:44.426268Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 40.000000 (insert task task-3 (3 by [1:104:2138])) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:106:2139] ... blocking block result NO_GROUP for [1:108:2139] ... blocking block result NO_GROUP for [1:107:2139] ... 
blocking block result NO_GROUP for [1:109:2139] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TTabletPipeTest::TestConnectReject [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot >> UpsertLoad::ShouldWriteDataBulkUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for connect11 >> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootCounters >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner [GOOD] >> TPDiskTest::CommitDeleteChunks [GOOD] >> TPDiskTest::DeviceHaltTooLong >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TTxAllocatorClientTest::ZeroRange [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> TResourceBrokerInstant::Test >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> ReadLoad::ShouldReadIterate [GOOD] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |81.6%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TResourceBrokerInstant::Test [GOOD] >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> TResourceBrokerInstant::TestErrors >> THiveTest::TestHiveBalancerHighUsageAndColumnShards |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-09-25T16:18:37.666132Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-09-25T16:18:37.666228Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-09-25T16:18:37.666359Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-09-25T16:18:37.666689Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.666776Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-09-25T16:18:37.669261Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669295Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669316Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-09-25T16:18:37.669344Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669357Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669383Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-09-25T16:18:37.669412Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-09-25T16:18:37.669593Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-09-25T16:18:37.669726Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669736Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-09-25T16:18:37.669751Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-09-25T16:18:37.669757Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 0 to# 5000 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> THiveTest::TestExternalBootCounters [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> THiveTest::TestBridgeBalance [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] Test command err: 2025-09-25T16:18:45.756571Z :BS_VDISK_GET CRIT: query_base.h:102: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 
99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# [5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 
82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# [5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 
c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# [5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# [5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191 ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-09-25T16:18:45.670166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.699658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.701424Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.701475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.701493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002719/r3tmp/tmpLPrWXv/pdisk_1.dat 2025-09-25T16:18:45.772755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.772797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.785148Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.786026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125250378 != 1758817125250382 2025-09-25T16:18:45.817038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.865151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.898522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:45.982118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.166223Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-09-25T16:18:46.166254Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-09-25T16:18:46.235307Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor finished in 0.069001s, errors=0 2025-09-25T16:18:46.235347Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 >> ObjectStorageListingTest::FilterListing >> KqpTx::CommitRequired >> TReplicationTests::CopyReplicatedTable [GOOD] >> 
KqpTx::ExplicitTcl |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> ActorPage::OptionsNoContent >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> TIncrHugeBasicTest::Defrag [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |81.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2025-09-25T16:18:46.232972Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:46.233088Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=0 resources={100, 100} 2025-09-25T16:18:46.233098Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-09-25T16:18:46.233107Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-09-25T16:18:46.233114Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-09-25T16:18:46.233126Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [1:105:2138])) 2025-09-25T16:18:46.233143Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-1 (1 by [1:105:2138]) (priority=0 type=compaction0 resources={80, 70} resubmit=0) 2025-09-25T16:18:46.233148Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-09-25T16:18:46.233153Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 160.000000 (insert task task-1 (1 by [1:105:2138])) 2025-09-25T16:18:46.233164Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {80, 70}) 2025-09-25T16:18:46.233171Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 160.000000 to 0.000000 (remove task task-1 (1 by [1:105:2138])) 2025-09-25T16:18:46.503511Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-09-25T16:18:46.503616Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=0 resources={100, 100} 2025-09-25T16:18:46.503628Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:46.503638Z node 2 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:637: Allocate resources {100, 100} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-09-25T16:18:46.503644Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-09-25T16:18:46.503656Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [2:105:2138])) 2025-09-25T16:18:46.503674Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=0 resources={100500, 100500} 2025-09-25T16:18:46.503680Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:408: SubmitTask failed for task 1 to [2:105:2138]: task with the same ID has been already submitted 2025-09-25T16:18:46.503697Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 2 to [2:105:2138]: cannot finish unknown task 2025-09-25T16:18:46.503703Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task >> KqpScheme::DropKeyColumn >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> KqpScheme::CreateTableWithReadReplicasUncompat >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> KqpScheme::QueryWithAlter >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-09-25T16:18:45.981368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.015879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.018196Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.018266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:46.018291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0025db/r3tmp/tmpneWtIz/pdisk_1.dat 2025-09-25T16:18:46.089512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.089559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.101133Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.101938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125549751 != 1758817125549755 2025-09-25T16:18:46.132932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.179193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.222780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.295525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.483483Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-09-25T16:18:46.483522Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-09-25T16:18:46.484054Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} started# 5 actors each with inflight# 4 2025-09-25T16:18:46.484064Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.484073Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 
1 2025-09-25T16:18:46.484079Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.484085Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.484090Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.484798Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} session: ydb://session/3?node_id=1&id=NGMyNWUxNDktZTVmMTlmMTItMzg4NWMxMTEtYjEyOWU4OTg= 2025-09-25T16:18:46.485146Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} session: ydb://session/3?node_id=1&id=NDk4Njc1ODctNjNmODRiZTYtMjBiZjU2OWEtYzBmMjI3ZDY= 2025-09-25T16:18:46.485434Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} session: ydb://session/3?node_id=1&id=ODMxZTA1ODktMTFhMmU1MWMtM2VmZmMxODQtODlmMDJhYTk= 2025-09-25T16:18:46.485958Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} session: ydb://session/3?node_id=1&id=ZjA1ZWJhYmMtYmVmMjg1ZDktNTQ4YmIzNzAtMzIxZDFkNGE= 2025-09-25T16:18:46.485970Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} session: ydb://session/3?node_id=1&id=MTBjZGM0ZjctZDljYzE5NzEtNGQ3NjFiYTItYjNjMjAwMWY= 2025-09-25T16:18:46.486917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:784:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:785:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.486994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.487253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:797:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.487287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.488368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:46.496849Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:800:2664] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.497017Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:804:2668] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.497122Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:805:2669] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.497291Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:806:2670] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.538491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:46.632359Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.632389Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2657], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.632400Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.632410Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.632418Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:796:2660], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.663801Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:900:2729] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.757420Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} finished in 1758817126.757404s, errors=0 2025-09-25T16:18:46.757505Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1758817126757 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.768454Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:973:2767] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.815072Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} finished in 1758817126.815057s, errors=0 2025-09-25T16:18:46.815174Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1758817126815 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.826402Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1024:2789] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.873321Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1074:2810] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.885042Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} finished in 1758817126.885027s, errors=0 2025-09-25T16:18:46.885140Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1758817126885 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.919619Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} finished in 1758817126.919604s, errors=0 2025-09-25T16:18:46.919724Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1758817126919 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.930650Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1125:2832] txid# 281474976715683, 
issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.976986Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} finished in 1758817126.976973s, errors=0 2025-09-25T16:18:46.977072Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1758817126976 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.977078Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} finished in 0.493054s, oks# 20, errors# 0 2025-09-25T16:18:46.977093Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:44.512114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:44.512148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:44.512155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:44.512162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:44.512170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:44.512174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:44.512184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:44.512198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:44.512356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:44.512433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:44.530650Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:44.530681Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:44.536430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:44.536553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:44.536591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:44.538583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:44.538671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:44.538795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.538933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:44.539557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:44.539615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:44.539935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:44.539947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:44.539973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:44.539982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:44.539990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:44.540028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.541602Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:44.559388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:44.559498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.559569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:44.559580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:44.559653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:44.559670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:44.560607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.560660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:44.560722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.560733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:44.560739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:44.560745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:44.561471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.561490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:44.561497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:44.561977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.561993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.561999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.562007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:44.562756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:44.563251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:44.563299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:44.563520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.563553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:44.563561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.563633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:44.563642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.563677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:44.563689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:44.564206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:44.564216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
TxId: 102 } 2025-09-25T16:18:46.796297Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 417 RawX2: 38654708048 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.796304Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-09-25T16:18:46.796321Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 417 RawX2: 38654708048 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.796329Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:18:46.796338Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 417 RawX2: 38654708048 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.796355Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:46.796360Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1061: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2025-09-25T16:18:46.796801Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.797077Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808003Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 38654707967 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.808032Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:46.808062Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 38654707967 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.808072Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:18:46.808083Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 
38654707967 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-09-25T16:18:46.808101Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808107Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808112Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808122Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808144Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:46.808632Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808764Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.808776Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-09-25T16:18:46.808785Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1079: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-09-25T16:18:46.808792Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-09-25T16:18:46.808805Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-09-25T16:18:46.808810Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 240 -> 240 2025-09-25T16:18:46.809206Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:46.809219Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:46.809238Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:46.809243Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:46.809248Z node 9 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:46.809252Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:46.809257Z node 9 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:46.809272Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [9:345:2322] message: TxId: 102 2025-09-25T16:18:46.809280Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:46.809287Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:46.809292Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:46.809327Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:18:46.809332Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:46.809726Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:46.809739Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:448:2407] TestWaitNotification: OK eventTxId 102 2025-09-25T16:18:46.809860Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:18:46.809928Z node 9 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 75us result status StatusSuccess 2025-09-25T16:18:46.810068Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 
0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScheme::CreateDroppedTable >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-09-25T16:18:45.909377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.954398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.957226Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.957310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.957337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002691/r3tmp/tmpSXPX2f/pdisk_1.dat 2025-09-25T16:18:46.034799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.034849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.047874Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.048876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125410914 != 1758817125410918 2025-09-25T16:18:46.079922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.129130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.162905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.246587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.432571Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-09-25T16:18:46.432620Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-09-25T16:18:46.433150Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} started# 5 actors each with inflight# 4 2025-09-25T16:18:46.433162Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.433173Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 
1 2025-09-25T16:18:46.433178Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.433185Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.433190Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-09-25T16:18:46.434220Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} session: ydb://session/3?node_id=1&id=OGM5YmYxMzMtZDIzZWZlYzYtZjAxNzE4ZmEtNzhkYjQwMjE= 2025-09-25T16:18:46.434240Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} session: ydb://session/3?node_id=1&id=NTg1ZDFlZC1iNTJjMTM1NC1jNWIxMDc4ZC1lZjIzODVjYw== 2025-09-25T16:18:46.434768Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} session: ydb://session/3?node_id=1&id=ZDIxMmY0NWEtOGJlZGZmYTktNDE1ODU3MWQtOWU3NzEzYmQ= 2025-09-25T16:18:46.434780Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} session: ydb://session/3?node_id=1&id=NTliMGM1Mi03ZTE0N2ItYmE3ZjAwNzgtNTQ0MWM1ZmY= 2025-09-25T16:18:46.435055Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} session: ydb://session/3?node_id=1&id=ZTZlZTIzYTctNjA2YzY3MTYtZTcxNTZlOGEtYWNkYzkyYjc= 2025-09-25T16:18:46.436012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:784:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:785:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:797:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.436393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.437586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:46.446987Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:800:2664] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.447050Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:804:2668] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.447335Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:805:2669] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.447455Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:806:2670] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.488982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:46.583218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.583253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2657], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.583261Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.583267Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.583272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:796:2660], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.614685Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:900:2729] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.710351Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} finished in 1758817126.710342s, errors=0 2025-09-25T16:18:46.710434Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1758817126710 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.722402Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:973:2767] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.769030Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} finished in 1758817126.769017s, errors=0 2025-09-25T16:18:46.769079Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1758817126769 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.780131Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1024:2789] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.826760Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} finished in 1758817126.826747s, errors=0 2025-09-25T16:18:46.826800Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1758817126826 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.838070Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1075:2811] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.885053Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} finished in 1758817126.885043s, errors=0 2025-09-25T16:18:46.885103Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1758817126885 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.896118Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1126:2833] txid# 281474976715683, 
issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.942970Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} finished in 1758817126.942956s, errors=0 2025-09-25T16:18:46.943026Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1758817126942 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.943034Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} finished in 0.509913s, oks# 20, errors# 0 2025-09-25T16:18:46.943053Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-09-25T16:18:45.441166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.484955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.486901Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.486959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.486977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0027db/r3tmp/tmpeeuiZo/pdisk_1.dat 2025-09-25T16:18:45.554951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.554995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.563778Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.564393Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125043357 != 1758817125043361 2025-09-25T16:18:45.595413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.642728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.676078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:45.759344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:45.942869Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:45.942914Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-09-25T16:18:46.010783Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor finished in 0.067817s, errors=0 2025-09-25T16:18:46.010816Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 2025-09-25T16:18:46.534568Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.554506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.555938Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.556017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:46.556053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0027db/r3tmp/tmpOWQPOp/pdisk_1.dat 2025-09-25T16:18:46.625249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.625298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.631045Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.631610Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817126143478 != 1758817126143482 2025-09-25T16:18:46.662815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.705934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.738117Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.822406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.005291Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:47.005331Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-09-25T16:18:47.074007Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor finished in 0.068605s, errors=0 2025-09-25T16:18:47.074055Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:741:2611] with tag# 2 >> ActorPage::OptionsNoContent [GOOD] >> KqpAcl::AclDml-UseSink-IsOlap >> KqpOlapTypes::Timestamp ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-09-25T16:18:46.004871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.044866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.047670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.047766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:46.047795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002622/r3tmp/tmpdGwgmS/pdisk_1.dat 2025-09-25T16:18:46.108418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.108469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.121227Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.122220Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125528598 != 1758817125528602 2025-09-25T16:18:46.153421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.202264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.235496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.319230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.504589Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-09-25T16:18:46.504618Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-09-25T16:18:46.505109Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} started# 5 actors each with inflight# 4 2025-09-25T16:18:46.505126Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-09-25T16:18:46.505137Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], 
subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-09-25T16:18:46.505142Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-09-25T16:18:46.505149Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-09-25T16:18:46.505155Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-09-25T16:18:46.506038Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} session: ydb://session/3?node_id=1&id=MWE5Mzc0MzMtMmJkMDg0YWQtYTFlYjY4ZmYtYTVkYTBkZWI= 2025-09-25T16:18:46.506054Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} session: ydb://session/3?node_id=1&id=M2ViYWJmOTYtNTY3OTY1NTQtNzIwYjcxMDItZDAyNzY5OTA= 2025-09-25T16:18:46.506402Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} session: ydb://session/3?node_id=1&id=NTI3MWZkMGItMzBiYmZkM2YtOWZhNWFkYTItMTdlMzFmMDA= 2025-09-25T16:18:46.506409Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} session: ydb://session/3?node_id=1&id=ZmFlZmMwYzktYzU1N2ExYzAtZTNkZGI1NTQtOWRjMmEzMmI= 2025-09-25T16:18:46.506586Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} session: ydb://session/3?node_id=1&id=ZjUxOGRmNGMtNTdhMjBlNzQtM2FhNThiMTYtZmViNTU3ZDk= 2025-09-25T16:18:46.507282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:784:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:785:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:797:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.507576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.508382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:46.517756Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:800:2664] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.517843Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:804:2668] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.518181Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:805:2669] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.518340Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:806:2670] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.559867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:46.654580Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.654622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2657], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.654633Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.654643Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.654652Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:796:2660], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:46.686377Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:900:2729] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.801678Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 5} finished in 1758817126.801664s, errors=0 2025-09-25T16:18:46.801767Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1758817126801 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.812866Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:973:2767] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.859574Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 2} finished in 1758817126.859556s, errors=0 2025-09-25T16:18:46.859645Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1758817126859 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.870944Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1024:2789] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.907897Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1063:2806] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:46.931772Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 1} finished in 1758817126.931762s, errors=0 2025-09-25T16:18:46.931822Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1758817126931 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.954849Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 4} finished in 1758817126.954838s, errors=0 2025-09-25T16:18:46.954957Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1758817126954 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:46.966182Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1124:2831] txid# 281474976715683, 
issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:47.012850Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:741:2611], subTag: 3} finished in 1758817127.012837s, errors=0 2025-09-25T16:18:47.012965Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:740:2610], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1758817127012 OperationsOK: 4 OperationsError: 0 } 2025-09-25T16:18:47.012973Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 2} finished in 0.507902s, oks# 20, errors# 0 2025-09-25T16:18:47.012999Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-09-25T16:18:45.457012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.510379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.513644Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.513734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.513771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002797/r3tmp/tmptcA8QX/pdisk_1.dat 2025-09-25T16:18:45.579048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.579095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.589833Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.590723Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817124979870 != 1758817124979874 2025-09-25T16:18:45.621738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.671329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.704866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:45.788409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:45.979751Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:346: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-09-25T16:18:45.980011Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-09-25T16:18:46.008602Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 1} TUpsertActor finished in 0.028539s, errors=0 2025-09-25T16:18:46.008948Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 
2025-09-25T16:18:46.008979Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:749:2619] with id# {Tag: 0, parent: [1:740:2610], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-09-25T16:18:46.009417Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:396: ReadIteratorLoadScenario# {Tag: 0, parent: [1:740:2610], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-09-25T16:18:46.009455Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [1:752:2622] 2025-09-25T16:18:46.009473Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Bootstrap called, sample# 0 2025-09-25T16:18:46.009480Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Connect to# 72075186224037888 called 2025-09-25T16:18:46.009549Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:46.010737Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} finished in 0.001176s, read# 1000 2025-09-25T16:18:46.010867Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [1:752:2622] with chunkSize# 0 finished: 0 { DurationMs: 1 OperationsOK: 1000 OperationsError: 0 } 2025-09-25T16:18:46.010906Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [1:755:2625] 2025-09-25T16:18:46.010913Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 2} Bootstrap called, sample# 0 2025-09-25T16:18:46.010917Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 2} Connect to# 72075186224037888 called 2025-09-25T16:18:46.010959Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:46.022071Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 2} finished in 0.011094s, read# 1000 2025-09-25T16:18:46.022127Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [1:755:2625] with chunkSize# 1 finished: 0 { DurationMs: 11 OperationsOK: 1000 OperationsError: 0 } 2025-09-25T16:18:46.022152Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [1:758:2628] 2025-09-25T16:18:46.022160Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 3} Bootstrap called, sample# 0 2025-09-25T16:18:46.022164Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 3} Connect to# 72075186224037888 called 2025-09-25T16:18:46.022227Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:46.023933Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 3} finished in 0.001692s, read# 1000 2025-09-25T16:18:46.023967Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [1:758:2628] with chunkSize# 10 finished: 0 { DurationMs: 1 OperationsOK: 
1000 OperationsError: 0 } 2025-09-25T16:18:46.023983Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [1:761:2631] 2025-09-25T16:18:46.023988Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 4} Bootstrap called, sample# 1000 2025-09-25T16:18:46.023992Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 4} Connect to# 72075186224037888 called 2025-09-25T16:18:46.024032Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:46.024450Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 4} finished in 0.000288s, sampled# 1000, iter finished# 1, oks# 1000 2025-09-25T16:18:46.024480Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:506: ReadIteratorLoadScenario# {Tag: 0, parent: [1:740:2610], subTag: 3} received keyCount# 1000 2025-09-25T16:18:46.024531Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:551: ReadIteratorLoadScenario# {Tag: 0, parent: [1:740:2610], subTag: 3} started read actor with id# [1:764:2634] 2025-09-25T16:18:46.024540Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:749:2619], subTag: 5} Bootstrap called, will read keys# 1000 2025-09-25T16:18:46.048976Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:559: ReadIteratorLoadScenario# {Tag: 0, parent: [1:740:2610], subTag: 3} received point times# 1000, Inflight left# 0 2025-09-25T16:18:46.049056Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:482: headread with inflight# 1 finished: 0 { DurationMs: 24 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2025-09-25T16:18:46.049084Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:616: ReadIteratorLoadScenario# {Tag: 0, parent: [1:740:2610], subTag: 3} finished in 0.040079s with report: { DurationMs: 1 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 11 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 1 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 24 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-09-25T16:18:46.049195Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:749:2619] with tag# 3 2025-09-25T16:18:46.641430Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.658109Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.659484Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.659559Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:46.659600Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002797/r3tmp/tmpeb38Zc/pdisk_1.dat 2025-09-25T16:18:46.712013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.712055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.717667Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.718234Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817126266522 != 1758817126266526 2025-09-25T16:18:46.749267Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.791688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.833884Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.907371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.096058Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:346: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-09-25T16:18:47.096137Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:740:2610], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-09-25T16:18:47.117070Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:740:2610], subTag: 1} TUpsertActor finished in 0.020875s, errors=0 2025-09-25T16:18:47.117256Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 
} 2025-09-25T16:18:47.117287Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:749:2619] with id# {Tag: 0, parent: [2:740:2610], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-09-25T16:18:47.117734Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:396: ReadIteratorLoadScenario# {Tag: 0, parent: [2:740:2610], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-09-25T16:18:47.117761Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [2:752:2622] 2025-09-25T16:18:47.117783Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 1} Bootstrap called, sample# 0 2025-09-25T16:18:47.117790Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 1} Connect to# 72075186224037888 called 2025-09-25T16:18:47.117859Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:47.118011Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 1} finished in 0.000145s, read# 10 2025-09-25T16:18:47.118046Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [2:752:2622] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-09-25T16:18:47.118062Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [2:755:2625] 2025-09-25T16:18:47.118070Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 2} Bootstrap called, sample# 0 2025-09-25T16:18:47.118075Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 2} Connect to# 72075186224037888 called 2025-09-25T16:18:47.118118Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:47.118414Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 2} finished in 0.000287s, read# 10 2025-09-25T16:18:47.118439Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [2:755:2625] with chunkSize# 1 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-09-25T16:18:47.118457Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [2:758:2628] 2025-09-25T16:18:47.118464Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 3} Bootstrap called, sample# 0 2025-09-25T16:18:47.118469Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 3} Connect to# 72075186224037888 called 2025-09-25T16:18:47.118519Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:47.118614Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 3} finished in 0.000088s, read# 10 2025-09-25T16:18:47.118630Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:456: fullscan actor# [2:758:2628] with chunkSize# 10 finished: 0 { DurationMs: 0 
OperationsOK: 10 OperationsError: 0 } 2025-09-25T16:18:47.118645Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:437: started fullscan actor# [2:761:2631] 2025-09-25T16:18:47.118651Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 4} Bootstrap called, sample# 10 2025-09-25T16:18:47.118656Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 4} Connect to# 72075186224037888 called 2025-09-25T16:18:47.118690Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:47.118751Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:749:2619], subTag: 4} finished in 0.000052s, sampled# 10, iter finished# 1, oks# 10 2025-09-25T16:18:47.118767Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:506: ReadIteratorLoadScenario# {Tag: 0, parent: [2:740:2610], subTag: 3} received keyCount# 10 2025-09-25T16:18:47.118806Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:551: ReadIteratorLoadScenario# {Tag: 0, parent: [2:740:2610], subTag: 3} started read actor with id# [2:764:2634] 2025-09-25T16:18:47.118814Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:749:2619], subTag: 5} Bootstrap called, will read keys# 10 2025-09-25T16:18:47.153028Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:559: ReadIteratorLoadScenario# {Tag: 0, parent: [2:740:2610], subTag: 3} received point times# 1000, Inflight left# 0 2025-09-25T16:18:47.153120Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:482: headread with inflight# 1 finished: 0 { DurationMs: 34 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2025-09-25T16:18:47.153153Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:616: ReadIteratorLoadScenario# {Tag: 0, parent: [2:740:2610], subTag: 3} finished in 0.035835s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 34 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-09-25T16:18:47.153183Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:749:2619] with tag# 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:44.650171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-09-25T16:18:44.650203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:44.650210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:44.650217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:44.650224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:44.650229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:44.650240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:44.650255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:44.650394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:44.650473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:44.668165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:44.668196Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:44.673205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:44.673320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:44.673364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:44.675553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:44.675632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:44.675754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.675852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:44.676330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:44.676388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:44.676709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:44.676722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:44.676750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:44.676760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:44.676767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:44.676808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.678376Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:44.702157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:44.702268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.702346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:44.702356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:44.702422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:44.702438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:44.703370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.703421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:44.703486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.703498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:44.703504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:44.703510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:44.704041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.704055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:44.704061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:44.704451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.704462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:44.704469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.704477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:44.705231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:44.705656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:44.705705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:44.705937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:44.705964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:44.705971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.706034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:44.706041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:44.706075Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:44.706088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:44.706587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:44.706601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ration is done id#102:0 progress is 1/1 2025-09-25T16:18:47.124457Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:47.124463Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:18:47.124470Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:47.124476Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:47.124482Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:47.124525Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:18:47.124537Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:18:47.124542Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-09-25T16:18:47.124547Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-09-25T16:18:47.124957Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-09-25T16:18:47.124972Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:18:47.124997Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:47.125013Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:47.125020Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:47.125026Z node 10 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-09-25T16:18:47.125032Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:18:47.125069Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:18:47.125297Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-09-25T16:18:47.125309Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:18:47.125323Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:47.125336Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:47.125342Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:47.125348Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-09-25T16:18:47.125357Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:18:47.125376Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:18:47.125381Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:18:47.125489Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435084, Sender [10:127:2152], Recipient [10:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:18:47.125498Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5414: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:18:47.125506Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:18:47.125512Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:18:47.125527Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:47.129636Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:47.130253Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:18:47.130267Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:47.130319Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:18:47.130323Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:47.130646Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:18:47.130720Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:18:47.130730Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:18:47.130806Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [10:462:2415], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:47.130815Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:18:47.130821Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:18:47.130863Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124996, Sender [10:409:2362], Recipient [10:127:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-09-25T16:18:47.130869Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-09-25T16:18:47.130908Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:18:47.130936Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:47.130942Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:460:2413] 2025-09-25T16:18:47.130976Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [10:462:2415], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:47.130981Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:47.130987Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 102 2025-09-25T16:18:47.131055Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [10:463:2416], Recipient [10:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:18:47.131061Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:18:47.131075Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:18:47.131121Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 44us result status StatusPathDoesNotExist 2025-09-25T16:18:47.131168Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScheme::FamilyColumnTest >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitPrepared >> KqpScheme::CreateFamilyWithCompressionLevel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command err: 2025-09-25T16:18:47.133957Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062042048905562:2144];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.133993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-09-25T16:18:47.182653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.197071Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:47.197307Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062042048905447:2081] 1758817127133019 != 1758817127133022 TServer::EnableGrpc on GrpcPort 2960, node 1 2025-09-25T16:18:47.208293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.208306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.208309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.208367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.237935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.237971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.239072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.245637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-09-25T16:18:47.266354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:47.267384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> KqpScheme::ValidatingUniqIndexSqlSuccess >> ObjectStorageListingTest::FilterListing [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-09-25T16:18:45.931887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.957876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.959999Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.960077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.960102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00267f/r3tmp/tmp0jmcKL/pdisk_1.dat 2025-09-25T16:18:46.020132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.020168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.028457Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.029147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125528627 != 1758817125528631 2025-09-25T16:18:46.060096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:46.106143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:46.150178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.223281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.407133Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:46.407168Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-09-25T16:18:46.469452Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor finished in 0.062233s, errors=0 2025-09-25T16:18:46.469486Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 2025-09-25T16:18:46.916676Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.928316Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.929338Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.929393Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:46.929419Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00267f/r3tmp/tmpKden6j/pdisk_1.dat 2025-09-25T16:18:46.983667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:46.983701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:46.987367Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.987728Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817126600589 != 1758817126600593 2025-09-25T16:18:47.018597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.061216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:47.093189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.176932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.356081Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:47.356112Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-09-25T16:18:47.418390Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor finished in 0.062229s, errors=0 2025-09-25T16:18:47.418424Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:741:2611] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 
2025-09-25T16:18:45.668467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.706065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.708930Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.709010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.709039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0026c8/r3tmp/tmpfhW5g6/pdisk_1.dat 2025-09-25T16:18:45.779420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.779471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.787349Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.787990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125246962 != 1758817125246966 2025-09-25T16:18:45.818801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.866790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.909848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:45.982797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.172198Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-09-25T16:18:46.172252Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-09-25T16:18:46.246562Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 2} TUpsertActor finished in 0.074218s, errors=0 2025-09-25T16:18:46.246606Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:741:2611] with tag# 2 2025-09-25T16:18:46.947183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:46.960665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:46.961789Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:46.961843Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:46.961871Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0026c8/r3tmp/tmpdz8G9V/pdisk_1.dat 2025-09-25T16:18:47.015800Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.015835Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.020681Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:47.021130Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817126600463 != 1758817126600467 2025-09-25T16:18:47.052032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.094778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:47.137363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.210763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.396423Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-09-25T16:18:47.396459Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-09-25T16:18:47.458964Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor finished in 0.062444s, errors=0 2025-09-25T16:18:47.458996Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:741:2611] with tag# 2 >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::EmptyTxOnCommit >> KqpScheme::CreateTableWithTtlSettingsUncompat >> 
KqpScheme::DisableS3ExternalDataSource >> UpsertLoad::ShouldDropCreateTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag [GOOD] Test command err: 2025-09-25T16:15:26.249318Z :BS_INCRHUGE DEBUG: incrhuge_keeper.cpp:72: BlockSize# 8128 BlocksInChunk# 2304 BlocksInMinBlob# 65 MaxBlobsPerChunk# 35 BlocksInDataSection# 2303 BlocksInIndexSection# 1 2025-09-25T16:15:26.249348Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:152: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] starting ReadLog 2025-09-25T16:15:26.252925Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:161: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] finished ReadLog 2025-09-25T16:15:26.252943Z :BS_INCRHUGE DEBUG: incrhuge_keeper_recovery.cpp:200: [PDisk# 000000001 Recovery] ApplyReadLog Chunks# [] Deletes# [] Owners# {} CurrentSerNum# 0 NextLsn# 1 2025-09-25T16:15:26.252958Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:515: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] ready 2025-09-25T16:15:26.252969Z :TEST DEBUG: test_actor_concurrent.h:153: finished Init Reference# [] Enumerated# [] InFlightDeletes# [] 2025-09-25T16:15:26.252972Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 1 2025-09-25T16:15:26.252975Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 0 InFlightWritesSize# 0 2025-09-25T16:15:26.254047Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 NumReq# 0 2025-09-25T16:15:26.254675Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 0 HandleWrite Lsn# 0 DataSize# 811717 WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-09-25T16:15:26.254679Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-09-25T16:15:26.254683Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-09-25T16:15:26.254687Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-09-25T16:15:26.255802Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 1 InFlightWritesSize# 1 2025-09-25T16:15:26.258660Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 1 HandleWrite Lsn# 1 DataSize# 1745495 WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-09-25T16:15:26.258663Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-09-25T16:15:26.258667Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-09-25T16:15:26.258675Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-09-25T16:15:26.259020Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:460: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1 Status# OK 2025-09-25T16:15:26.259028Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 2 ChunkSerNum# 1000 2025-09-25T16:15:26.259033Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 3 ChunkSerNum# 1001 2025-09-25T16:15:26.259035Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 4 ChunkSerNum# 1002 2025-09-25T16:15:26.259038Z :BS_INCRHUGE DEBUG: 
incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 5 ChunkSerNum# 1003 2025-09-25T16:15:26.259040Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 6 ChunkSerNum# 1004 2025-09-25T16:15:26.259042Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 7 ChunkSerNum# 1005 2025-09-25T16:15:26.259044Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 8 ChunkSerNum# 1006 2025-09-25T16:15:26.259047Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 9 ChunkSerNum# 1007 2025-09-25T16:15:26.259050Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-09-25T16:15:26.259051Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-09-25T16:15:26.259504Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 100 SizeInBytes# 812800 Offset# 0 Size# 812800 End# 812800 Id# 0000000000000000 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.259507Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem entry 2025-09-25T16:15:26.260524Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem OffsetInBlocks# 100 IndexInsideChunk# 1 SizeInBlocks# 215 SizeInBytes# 1747520 Offset# 812800 Size# 1747520 End# 2560320 Id# 0000000000000001 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.261957Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 NumReq# 1 2025-09-25T16:15:26.265476Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 2 InFlightWritesSize# 2 2025-09-25T16:15:26.266267Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:602037:2:0] Lsn# 2 NumReq# 2 2025-09-25T16:15:26.267448Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 3 InFlightWritesSize# 3 2025-09-25T16:15:26.269027Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1287465:3:0] Lsn# 3 NumReq# 3 2025-09-25T16:15:26.269444Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2 HandleWrite Lsn# 2 DataSize# 602037 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-09-25T16:15:26.269453Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-09-25T16:15:26.269456Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem entry 2025-09-25T16:15:26.269536Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem OffsetInBlocks# 315 IndexInsideChunk# 2 SizeInBlocks# 75 SizeInBytes# 609600 Offset# 2560320 Size# 609600 End# 3169920 Id# 0000000000000002 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.269543Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 3 HandleWrite Lsn# 3 DataSize# 1287465 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-09-25T16:15:26.269545Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-09-25T16:15:26.269546Z :BS_INCRHUGE DEBUG: 
incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem entry 2025-09-25T16:15:26.269671Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem OffsetInBlocks# 390 IndexInsideChunk# 3 SizeInBlocks# 159 SizeInBytes# 1292352 Offset# 3169920 Size# 1292352 End# 4462272 Id# 0000000000000003 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.271934Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 4 InFlightWritesSize# 4 2025-09-25T16:15:26.274024Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1501676:4:0] Lsn# 4 NumReq# 4 2025-09-25T16:15:26.276797Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 5 InFlightWritesSize# 5 2025-09-25T16:15:26.277688Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:687721:5:0] Lsn# 5 NumReq# 5 2025-09-25T16:15:26.278963Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 6 InFlightWritesSize# 6 2025-09-25T16:15:26.280284Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 4 HandleWrite Lsn# 4 DataSize# 1501676 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-09-25T16:15:26.280288Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-09-25T16:15:26.280293Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem entry 2025-09-25T16:15:26.280465Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem OffsetInBlocks# 549 IndexInsideChunk# 4 SizeInBlocks# 185 SizeInBytes# 1503680 Offset# 4462272 Size# 1503680 End# 5965952 Id# 0000000000000004 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.280474Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 5 HandleWrite Lsn# 5 DataSize# 687721 WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.280476Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.281911Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1957662:6:0] Lsn# 6 NumReq# 6 2025-09-25T16:15:26.284861Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 6 HandleWrite Lsn# 6 DataSize# 1957662 WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.284865Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.285550Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-09-25T16:15:26.287795Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1824284:7:0] Lsn# 7 NumReq# 7 2025-09-25T16:15:26.291393Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 7 HandleWrite Lsn# 7 DataSize# 1824284 WriteQueueSize# 3 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.291398Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 5 2025-09-25T16:15:26.419642Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 0 ApplyBlobWrite Status# OK 2025-09-25T16:15:26.419675Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 
WriteInProgressItemsSize# 4 2025-09-25T16:15:26.419679Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem entry 2025-09-25T16:15:26.419789Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem OffsetInBlocks# 734 IndexInsideChunk# 5 SizeInBlocks# 85 SizeInBytes# 690880 Offset# 5965952 Size# 690880 End# 6656832 Id# 0000000000000005 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-09-25T16:15:26.419811Z :TEST DEBUG: test_actor_concurrent.h:308: finished Write Id# 0000000000000000 LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 2025-09-25T16:15:26.419823Z :TEST INFO: test_actor_concurrent.h:320: BytesWritten# 0 MB ElapsedTime# 0.242869s Speed# 0.00 MB/s 2025-09-25T16:15:26.419828Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 2 2025-09-25T16:15:26.419830Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-09-25T16:15:26.419844Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000000 NumReq# 7 2025-09-25T16:15:26.419865Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 8 HandleDelete Ids# [0000000000000000] 2025-09-25T16:15:26.419874Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 2 ChunkSerNum# 1000 Id# 0000000000000000 IndexInsideChunk# 0 SizeInBlocks# 100 Lsn# 2 Owner# 1 SeqNo# 8 2025-09-25T16:15:26.419878Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 2 Entrypoint# false Virtual# f ... 1 Logger] LogBlobDeletes ChunkIdx# 31 ChunkSerNum# 1241 Id# 0000000000000060 IndexInsideChunk# 3 SizeInBlocks# 116 Lsn# 3222 Owner# 1 SeqNo# 6080 2025-09-25T16:18:46.967691Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3222 Entrypoint# false Virtual# false 2025-09-25T16:18:46.970113Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:2037045:6081:0] Lsn# 6081 NumReq# 33 2025-09-25T16:18:46.970126Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2260 HandleWrite Lsn# 6081 DataSize# 2037045 WriteQueueSize# 15 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.970128Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 15 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.974080Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 34 InFlightWritesSize# 22 2025-09-25T16:18:46.974119Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 000000000000007e NumReq# 34 2025-09-25T16:18:46.974125Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 35 InFlightWritesSize# 22 2025-09-25T16:18:46.974127Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000007 NumReq# 35 2025-09-25T16:18:46.974129Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 36 InFlightWritesSize# 22 2025-09-25T16:18:46.974132Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000072 NumReq# 36 2025-09-25T16:18:46.974134Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 37 InFlightWritesSize# 22 2025-09-25T16:18:46.974136Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000002 NumReq# 37 2025-09-25T16:18:46.974138Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 38 InFlightWritesSize# 22 2025-09-25T16:18:46.974141Z :TEST DEBUG: 
test_actor_concurrent.h:381: sent Delete Id# 0000000000000052 NumReq# 38 2025-09-25T16:18:46.974143Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 39 InFlightWritesSize# 22 2025-09-25T16:18:46.974147Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 000000000000004a NumReq# 39 2025-09-25T16:18:46.974149Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 40 InFlightWritesSize# 22 2025-09-25T16:18:46.974152Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 000000000000007c NumReq# 40 2025-09-25T16:18:46.974153Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 41 InFlightWritesSize# 22 2025-09-25T16:18:46.974152Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6082 HandleDelete Ids# [000000000000007e] 2025-09-25T16:18:46.974165Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 29 ChunkSerNum# 1239 Id# 000000000000007e IndexInsideChunk# 1 SizeInBlocks# 80 Lsn# 3223 Owner# 1 SeqNo# 6082 2025-09-25T16:18:46.974169Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3223 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974197Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6083 HandleDelete Ids# [0000000000000007] 2025-09-25T16:18:46.974211Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 31 ChunkSerNum# 1241 Id# 0000000000000007 IndexInsideChunk# 7 SizeInBlocks# 134 Lsn# 3224 Owner# 1 SeqNo# 6083 2025-09-25T16:18:46.974213Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3224 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974218Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6084 HandleDelete Ids# [0000000000000072] 2025-09-25T16:18:46.974222Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 35 ChunkSerNum# 1245 Id# 0000000000000072 IndexInsideChunk# 0 SizeInBlocks# 211 Lsn# 3225 Owner# 1 SeqNo# 6084 2025-09-25T16:18:46.974229Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3225 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974234Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6085 HandleDelete Ids# [0000000000000002] 2025-09-25T16:18:46.974250Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 37 ChunkSerNum# 1247 Id# 0000000000000002 IndexInsideChunk# 12 SizeInBlocks# 119 Lsn# 3226 Owner# 1 SeqNo# 6085 2025-09-25T16:18:46.974252Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3226 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974257Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6086 HandleDelete Ids# [0000000000000052] 2025-09-25T16:18:46.974261Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 31 ChunkSerNum# 1241 Id# 0000000000000052 IndexInsideChunk# 12 SizeInBlocks# 245 Lsn# 3227 Owner# 1 SeqNo# 6086 2025-09-25T16:18:46.974263Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3227 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974268Z 
:BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6087 HandleDelete Ids# [000000000000004a] 2025-09-25T16:18:46.974272Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 18 ChunkSerNum# 1228 Id# 000000000000004a IndexInsideChunk# 14 SizeInBlocks# 258 Lsn# 3228 Owner# 1 SeqNo# 6087 2025-09-25T16:18:46.974275Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3228 Entrypoint# false Virtual# false 2025-09-25T16:18:46.974281Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6088 HandleDelete Ids# [000000000000007c] 2025-09-25T16:18:46.974284Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 23 ChunkSerNum# 1233 Id# 000000000000007c IndexInsideChunk# 11 SizeInBlocks# 232 Lsn# 3229 Owner# 1 SeqNo# 6088 2025-09-25T16:18:46.974286Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3229 Entrypoint# false Virtual# false 2025-09-25T16:18:46.976396Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1829039:6089:0] Lsn# 6089 NumReq# 41 2025-09-25T16:18:46.976408Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2261 HandleWrite Lsn# 6089 DataSize# 1829039 WriteQueueSize# 16 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.976412Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 16 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.979901Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 42 InFlightWritesSize# 23 2025-09-25T16:18:46.979936Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 000000000000005c NumReq# 42 2025-09-25T16:18:46.979940Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 43 InFlightWritesSize# 23 2025-09-25T16:18:46.979942Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000012 NumReq# 43 2025-09-25T16:18:46.979944Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 44 InFlightWritesSize# 23 2025-09-25T16:18:46.979947Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000017 NumReq# 44 2025-09-25T16:18:46.979949Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 45 InFlightWritesSize# 23 2025-09-25T16:18:46.979958Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6090 HandleDelete Ids# [000000000000005c] 2025-09-25T16:18:46.979969Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 37 ChunkSerNum# 1247 Id# 000000000000005c IndexInsideChunk# 0 SizeInBlocks# 163 Lsn# 3230 Owner# 1 SeqNo# 6090 2025-09-25T16:18:46.979978Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3230 Entrypoint# false Virtual# false 2025-09-25T16:18:46.980004Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6091 HandleDelete Ids# [0000000000000012] 2025-09-25T16:18:46.980012Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 25 ChunkSerNum# 1235 Id# 0000000000000012 IndexInsideChunk# 1 SizeInBlocks# 257 Lsn# 3231 Owner# 1 SeqNo# 6091 2025-09-25T16:18:46.980013Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3231 
Entrypoint# false Virtual# false 2025-09-25T16:18:46.980018Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6092 HandleDelete Ids# [0000000000000017] 2025-09-25T16:18:46.980024Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 28 ChunkSerNum# 1238 Id# 0000000000000017 IndexInsideChunk# 0 SizeInBlocks# 148 Lsn# 3232 Owner# 1 SeqNo# 6092 2025-09-25T16:18:46.980025Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3232 Entrypoint# false Virtual# false 2025-09-25T16:18:46.982235Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1886811:6093:0] Lsn# 6093 NumReq# 45 2025-09-25T16:18:46.982246Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2262 HandleWrite Lsn# 6093 DataSize# 1886811 WriteQueueSize# 17 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.982249Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 17 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.985851Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 46 InFlightWritesSize# 24 2025-09-25T16:18:46.988061Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1769814:6094:0] Lsn# 6094 NumReq# 46 2025-09-25T16:18:46.988101Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2263 HandleWrite Lsn# 6094 DataSize# 1769814 WriteQueueSize# 18 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.988105Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 18 WriteInProgressItemsSize# 5 2025-09-25T16:18:46.991482Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 47 InFlightWritesSize# 25 2025-09-25T16:18:46.991519Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000040 NumReq# 47 2025-09-25T16:18:46.991523Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 48 InFlightWritesSize# 25 2025-09-25T16:18:46.991526Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 000000000000001a NumReq# 48 2025-09-25T16:18:46.991529Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 49 InFlightWritesSize# 25 2025-09-25T16:18:46.991533Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000001 NumReq# 49 2025-09-25T16:18:46.991551Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6095 HandleDelete Ids# [0000000000000040] 2025-09-25T16:18:46.991573Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 38 ChunkSerNum# 1248 Id# 0000000000000040 IndexInsideChunk# 8 SizeInBlocks# 125 Lsn# 3233 Owner# 1 SeqNo# 6095 2025-09-25T16:18:46.991577Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:638: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3233 Entrypoint# false Virtual# false >> BootstrapperTest::MultipleBootstrappers [GOOD] >> KqpScheme::DropKeyColumn [GOOD] >> KqpScheme::DropNonExistingExternalDataSource >> KqpScheme::CreateDroppedTable [GOOD] >> KqpScheme::CreateDropTableMultipleTime >> KqpScheme::CreateTableWithReadReplicasUncompat [GOOD] >> KqpScheme::CreateTableWithReadReplicasCompat ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-09-25T16:18:47.314792Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:47.348968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:47.350936Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:47.351003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:47.351021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003c83/r3tmp/tmpsyDfXe/pdisk_1.dat 2025-09-25T16:18:47.412433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.412466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.423374Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:47.424062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817126890803 != 1758817126890807 2025-09-25T16:18:47.461322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.512102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:47.546389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.630001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.645550Z node 1 :TX_DATASHARD INFO: datashard.cpp:375: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:673:2564] 2025-09-25T16:18:47.645672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-09-25T16:18:47.654409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-09-25T16:18:47.654462Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-09-25T16:18:47.654656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1325: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-09-25T16:18:47.654668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1381: LoadLockChangeRecords at tablet: 72075186224037888 2025-09-25T16:18:47.654676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1430: LoadChangeRecordCommits at tablet: 72075186224037888 2025-09-25T16:18:47.654748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-09-25T16:18:47.654777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-09-25T16:18:47.654792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:688:2564] in generation 1 2025-09-25T16:18:47.665113Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-09-25T16:18:47.668809Z node 1 :TX_DATASHARD INFO: datashard.cpp:419: Switched to work state WaitScheme tabletId 72075186224037888 2025-09-25T16:18:47.668918Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:459: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-09-25T16:18:47.668951Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1260: Change sender created: at tablet: 72075186224037888, actorId: [1:690:2574] 2025-09-25T16:18:47.668956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:47.668961Z node 1 :TX_DATASHARD INFO: datashard.cpp:1282: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-09-25T16:18:47.668965Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:47.669119Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-09-25T16:18:47.669143Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-09-25T16:18:47.669149Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:47.669154Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:47.669162Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-09-25T16:18:47.669166Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:47.669260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2561], serverId# [1:675:2565], sessionId# [0:0:0] 2025-09-25T16:18:47.669283Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-09-25T16:18:47.669340Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-09-25T16:18:47.669354Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-09-25T16:18:47.669623Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:47.679922Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-09-25T16:18:47.679976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:469: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-09-25T16:18:47.813991Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:705:2583], serverId# [1:707:2585], sessionId# [0:0:0] 
2025-09-25T16:18:47.815024Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-09-25T16:18:47.815049Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:47.815197Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:47.815211Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-09-25T16:18:47.815223Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-09-25T16:18:47.815314Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-09-25T16:18:47.815353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-09-25T16:18:47.815377Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-09-25T16:18:47.815391Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-09-25T16:18:47.815831Z node 1 :TX_DATASHARD INFO: datashard.cpp:477: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-09-25T16:18:47.815978Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-09-25T16:18:47.816442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3755: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-09-25T16:18:47.816453Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:47.816739Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-09-25T16:18:47.816753Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:47.817025Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-09-25T16:18:47.817037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1265: Trying to activate change sender: at tablet: 72075186224037888 2025-09-25T16:18:47.817044Z node 1 :TX_DATASHARD INFO: datashard.cpp:1303: Change sender activated: at tablet: 72075186224037888 2025-09-25T16:18:47.817061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:811: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2405], exec latency: 0 ms, propose latency: 0 ms 2025-09-25T16:18:47.817081Z node 1 :TX_DATASHARD INFO: datashard.cpp:1600: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-09-25T16:18:47.817093Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:47.818100Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:47.818394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2966: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-09-25T16:18:47.818409Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-09-25T16:18:47.818550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3773: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-09-25T16:18:47.820532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2609], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.820554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2614], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.820565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.820738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.820760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.821732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:47.822806Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:47.864843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:47.959151Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:129: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-09-25T16:18:47.959801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2617], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:47.995754Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:825:2658] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:48.041899Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k60tq3ccffaa2e0xfaabtr7c, Database: , SessionId: ydb://session/3?node_id=1&id=ZDQwZjM5NmItYzY5NzkzNDUtODI5NzM5NzYtMzhkOGRjMDA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:48.043219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2675], serverId# [1:857:2676], sessionId# [0:0:0] 2025-09-25T16:18:48.043362Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-09-25T16:18:48.043411Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-09-25T16:18:48.053868Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-09-25T16:18:48.055994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2682], serverId# [1:865:2683], sessionId# [0:0:0] 2025-09-25T16:18:48.056043Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-09-25T16:18:48.056089Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-09-25T16:18:48.056124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2682], serverId# [1:865:2683], sessionId# [0:0:0] 2025-09-25T16:18:48.056361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3723: Server connected at leader tablet# 72075186224037888, clientId# [1:870:2688], serverId# [1:871:2689], sessionId# [0:0:0] 2025-09-25T16:18:48.056384Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-09-25T16:18:48.056402Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-09-25T16:18:48.056420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037888, clientId# [1:870:2688], serverId# [1:871:2689], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBridgeBalance [GOOD] Test command err: Create = 0.53759 Process = 0.956451 Move = 0.160004 HIVE_TABLET_BALANCE_STRATEGY_HEAVIEST Time=0.05182 
Indirection=99% Distribution=0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 Duplicates=0 HIVE_TABLET_BALANCE_STRATEGY_WEIGHTED_RANDOM Time=0.074287 Indirection=99% Distribution=0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 Duplicates=0 HIVE_TABLET_BALANCE_STRATEGY_RANDOM Time=0.051068 Indirection=99% Distribution=0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 Duplicates=0 2025-09-25T16:18:18.343622Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:18.349589Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:18.349700Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:18.350001Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:18.350093Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:18.350369Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:18.350382Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:18.350611Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-09-25T16:18:18.350617Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:18.350651Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:18.350695Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:18.354675Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:18.354697Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:18.355097Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355130Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355162Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355194Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355226Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355258Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355287Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:18.355293Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:18.355308Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-09-25T16:18:18.355315Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-09-25T16:18:18.355324Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:18.355333Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:18.355482Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:18.357991Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:18.358013Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:18.358367Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:18.358415Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:18.358475Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:18.358514Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:18.358522Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:18.359764Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:18.359825Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:18.359937Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:18.359950Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:18.359961Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:18.359967Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:18.359985Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:53:2093] 2025-09-25T16:18:18.360006Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:18.360196Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send 
[1:28:2075] 2025-09-25T16:18:18.360207Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:18.360216Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-09-25T16:18:18.360269Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:53:2093] 2025-09-25T16:18:18.360275Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:53:2093] 2025-09-25T16:18:18.360286Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:18:18.360295Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-09-25T16:18:18.360302Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-09-25T16:18:18.360313Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:18.360346Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-09-25T16:18:18.360350Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-09-25T16:18:18.360391Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:18.361913Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-09-25T16:18:18.361927Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-09-25T16:18:18.361954Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:18.362001Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:53:2093] 2025-09-25T16:18:18.362011Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:18.362022Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-09-25T16:18:18.362033Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:18.362253Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:18.362292Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 
1:2:0} 2025-09-25T16:18:18.362467Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-09-25T16:18:18.362494Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 2146435075 Sender# [1:51:2092] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:18.362509Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.003961s 2025-09-25T16:18:18.362657Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:18.362731Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:538} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeIn ... d# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2025-09-25T16:18:45.729384Z node 45 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:182: [90531aad7e2f73ee] Keep# [72075186224037892:1:2:1:8192:289:0] Marker# DSPC04 2025-09-25T16:18:45.729393Z node 45 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [45:1689:2406] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037892:2:1:1] collect=[2:0] Keep: [72075186224037892:1:2:1:8192:289:0] cookie# 0 2025-09-25T16:18:45.730193Z node 45 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [2a3a7dd47792978e] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037892 RecordGeneration# 2 Channel# 1 VDisk# [80000007:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:45.730217Z node 45 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [2a3a7dd47792978e] Result# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-09-25T16:18:45.730326Z node 45 :BS_PROXY_BRIDGE DEBUG: {BPB02@bridge_proxy.cpp:798} intermediate response RequestId# 4b74c5d8db2444cc GroupId# 2147483655 Status# OK PileState# SYNCHRONIZED Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} 2025-09-25T16:18:45.730658Z node 45 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [d870d4b8f750f3c9] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037892 RecordGeneration# 2 Channel# 0 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:45.730672Z node 45 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [d870d4b8f750f3c9] Result# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-09-25T16:18:45.730682Z node 45 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [e0012a0104eae54b] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037892 RecordGeneration# 2 Channel# 0 VDisk# [80000002:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:45.730685Z node 45 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [e0012a0104eae54b] Result# 
TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-09-25T16:18:45.730717Z node 45 :BS_PROXY_BRIDGE DEBUG: {BPB02@bridge_proxy.cpp:798} intermediate response RequestId# f5284ce55ab977d6 GroupId# 2147483649 Status# OK PileState# SYNCHRONIZED Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-09-25T16:18:45.730745Z node 45 :BS_PROXY_BRIDGE DEBUG: {BPB02@bridge_proxy.cpp:798} intermediate response RequestId# f5284ce55ab977d6 GroupId# 2147483650 Status# OK PileState# SYNCHRONIZED Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-09-25T16:18:45.730758Z node 45 :BS_PROXY_BRIDGE INFO: {BPB01@bridge_proxy.cpp:869} request finished RequestId# f5284ce55ab977d6 Status# OK Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Passed# 0.001605s SubrequestTimings# [NKikimr::TEvBlobStorage::TEvCollectGarbageResult:0.001549s NKikimr::TEvBlobStorage::TEvCollectGarbageResult:0.001563s] 2025-09-25T16:18:45.730780Z node 45 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [90531aad7e2f73ee] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037892 RecordGeneration# 2 Channel# 1 VDisk# [80000008:1:0:0:0]} Marker# DSPC01 2025-09-25T16:18:45.730786Z node 45 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [90531aad7e2f73ee] Result# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-09-25T16:18:45.730815Z node 45 :BS_PROXY_BRIDGE DEBUG: {BPB02@bridge_proxy.cpp:798} intermediate response RequestId# 4b74c5d8db2444cc GroupId# 2147483656 Status# OK PileState# SYNCHRONIZED Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} 2025-09-25T16:18:45.730821Z node 45 :BS_PROXY_BRIDGE INFO: {BPB01@bridge_proxy.cpp:869} request finished RequestId# 4b74c5d8db2444cc Status# OK Response# TEvCollectGarbageResult {TabletId# 72075186224037892 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Passed# 0.001630s SubrequestTimings# [NKikimr::TEvBlobStorage::TEvCollectGarbageResult:0.001121s NKikimr::TEvBlobStorage::TEvCollectGarbageResult:0.001593s] 2025-09-25T16:18:45.854270Z node 45 :BS_PROXY_PUT INFO: dsproxy_put.cpp:650: [52ac45011689f04a] bootstrap ActorId# [45:1769:2467] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:19:0:0:153:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-09-25T16:18:45.854322Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [52ac45011689f04a] Id# [72057594037927937:2:19:0:0:153:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-09-25T16:18:45.854329Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [52ac45011689f04a] restore Id# [72057594037927937:2:19:0:0:153:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:18:45.854337Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [52ac45011689f04a] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:19:0:0:153:1] Marker# BPG33 2025-09-25T16:18:45.854342Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [52ac45011689f04a] Sending missing VPut 
part# 0 to# 0 blob Id# [72057594037927937:2:19:0:0:153:1] Marker# BPG32 2025-09-25T16:18:45.854375Z node 45 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [45:234:2086] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:19:0:0:153:1] FDS# 153 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:18:45.854926Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [52ac45011689f04a] received {EvVPutResult Status# OK ID# [72057594037927937:2:19:0:0:153:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 34 } Cost# 81204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 35 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:18:45.854953Z node 45 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [52ac45011689f04a] Result# TEvPutResult {Id# [72057594037927937:2:19:0:0:153:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:18:45.854959Z node 45 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [52ac45011689f04a] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:19:0:0:153:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:45.854980Z node 45 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.15 sample PartId# [72057594037927937:2:19:0:0:153:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 45 } TEvVPutResult{ TimestampMs# 0.71 VDiskId# [0:1:0:0:0] NodeId# 45 Status# OK } ] } 2025-09-25T16:18:45.855025Z node 45 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:19:0:0:153:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:45.855063Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:20} commited cookie 1 for step 19 2025-09-25T16:18:45.855083Z node 45 :HIVE DEBUG: tx__update_tablet_status.cpp:213: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037892 SideEffects: {Notifications: 0x7FF00009 [45:1632:2247] NKikimr::NHive::TEvPrivate::TEvRestartComplete} 2025-09-25T16:18:45.855098Z node 45 :HIVE DEBUG: tx__process_boot_queue.cpp:26: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2025-09-25T16:18:45.855137Z node 45 :HIVE DEBUG: balancer.cpp:345: HIVE#72057594037927937 Balancer [45:1632:2247] received for tablet (72075186224037892,0) 2025-09-25T16:18:45.855146Z node 45 :HIVE INFO: balancer.cpp:137: HIVE#72057594037927937 Balancer finished with 1 movements made 2025-09-25T16:18:45.855153Z node 45 :HIVE DEBUG: balancer.cpp:156: HIVE#72057594037927937 Balancer initiated recheck 2025-09-25T16:18:45.886135Z node 45 :HIVE DEBUG: hive_impl.cpp:2497: HIVE#72057594037927937 ProcessTabletBalancer [(72057594046678944:1,1)] MaxUsage=0.400000000 on #47 MinUsage=0.200000000 on #45 Scatter=0.500000000 2025-09-25T16:18:45.886163Z node 45 :HIVE DEBUG: hive_impl.cpp:2497: HIVE#72057594037927937 ProcessTabletBalancer [(72057594046678944:1,2)] MaxUsage=0.000000000 on #48 MinUsage=0.000000000 on #46 Scatter=0.000000000 2025-09-25T16:18:45.886194Z node 45 :HIVE DEBUG: hive_impl.cpp:403: HIVE#72057594037927937 Handle BalancerOut 2025-09-25T16:18:45.886279Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap 
[45:1770:2468] 2025-09-25T16:18:45.886286Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [45:1770:2468] 2025-09-25T16:18:45.886303Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [45:896:2243] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:45.886313Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [45:896:2243] 2025-09-25T16:18:45.886341Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [45:1770:2468] 2025-09-25T16:18:45.886357Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [45:1770:2468] 2025-09-25T16:18:45.886367Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [45:1770:2468] 2025-09-25T16:18:45.886374Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:1770:2468] 2025-09-25T16:18:45.886398Z node 45 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [45:1770:2468] 2025-09-25T16:18:45.886441Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [45:1770:2468] 2025-09-25T16:18:45.886445Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [45:1770:2468] 2025-09-25T16:18:45.886448Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [45:1770:2468] 2025-09-25T16:18:45.886452Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [45:1770:2468] 2025-09-25T16:18:45.886455Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [45:1770:2468] 2025-09-25T16:18:45.886463Z node 45 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [45:890:2239] EventType# 268697616 2025-09-25T16:18:45.886482Z node 45 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([45:1770:2468]) [45:1771:2469] 2025-09-25T16:18:45.886503Z node 45 :HIVE TRACE: hive_impl.cpp:1990: HIVE#72057594037927937 Handle TEvRequestHiveInfo >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> KqpOlapTypes::Timestamp [GOOD] >> KqpScheme::AddColumnFamilyWithCompressionLevel >> YdbLogStore::LogTable [GOOD] >> YdbLogStore::AlterLogTable >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-09-25T16:18:45.748943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.794200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.796043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry 
for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.796120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.796148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0026b1/r3tmp/tmpPf33x2/pdisk_1.dat 2025-09-25T16:18:45.855220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.855271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.868866Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.869956Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125251581 != 1758817125251585 2025-09-25T16:18:45.901080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.951899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.984942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:46.068594Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-09-25T16:18:46.098123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:652:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.098177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.098260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:669:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.098270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.102133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.293095Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:346: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-09-25T16:18:46.293395Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:648:2543], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-09-25T16:18:46.314199Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:648:2543], subTag: 1} TUpsertActor finished in 0.020741s, errors=0 2025-09-25T16:18:46.314298Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:46.314318Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:648:2543], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-09-25T16:18:46.366994Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:648:2543], subTag: 3} TUpsertActor finished in 0.052602s, errors=0 2025-09-25T16:18:46.367036Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [1:759:2622] with tag# 3 2025-09-25T16:18:46.991666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:47.007265Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:47.008584Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:47.008653Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:18:47.008690Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0026b1/r3tmp/tmpgQsG59/pdisk_1.dat 2025-09-25T16:18:47.077006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.077052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.082732Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:47.083252Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817126600622 != 1758817126600626 2025-09-25T16:18:47.114563Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.157846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:47.201307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.274813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.457092Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-09-25T16:18:47.457130Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-09-25T16:18:47.839038Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:740:2610], subTag: 2} TUpsertActor finished in 0.381827s, errors=0 2025-09-25T16:18:47.839070Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:741:2611] with tag# 2 2025-09-25T16:18:47.840168Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2025-09-25T16:18:47.843011Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:782:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.843041Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.843099Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:793:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.843112Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.867711Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:47.973383Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2025-09-25T16:18:47.977544Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:847:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.977607Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.977711Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:851:2700], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.977722Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.980074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.015030Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-09-25T16:18:48.157431Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:346: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-09-25T16:18:48.157522Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:779:2649], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-09-25T16:18:48.168188Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:779:2649], subTag: 1} TUpsertActor finished in 0.010589s, errors=0 2025-09-25T16:18:48.168307Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-09-25T16:18:48.168342Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:779:2649], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-09-25T16:18:48.221037Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:779:2649], subTag: 3} TUpsertActor finished in 0.052626s, errors=0 2025-09-25T16:18:48.221070Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:940:2771] with tag# 3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-09-25T16:18:44.471850Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:44.471881Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:44.471899Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:44.472084Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-09-25T16:18:44.472093Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-09-25T16:18:44.472138Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-09-25T16:18:44.472143Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 13151740404452589043 2025-09-25T16:18:44.472156Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-09-25T16:18:44.472161Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8427358873417017059 2025-09-25T16:18:44.472450Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-09-25T16:18:44.472477Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-09-25T16:18:44.472484Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-09-25T16:18:44.472490Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-09-25T16:18:44.472496Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-09-25T16:18:44.472539Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-09-25T16:18:44.472545Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.153108s 2025-09-25T16:18:44.472574Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-09-25T16:18:44.472579Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.170556s 2025-09-25T16:18:44.689699Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:44.689859Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:44.689943Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:44.689949Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-09-25T16:18:44.720757Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:44.720973Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:44.721097Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:44.721103Z node 3 :BOOTSTRAPPER INFO: 
bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 5 (idx 3) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2025-09-25T16:18:45.597648Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 4 2025-09-25T16:18:45.597671Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-09-25T16:18:45.597775Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-09-25T16:18:45.597784Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:45.597864Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-09-25T16:18:45.597870Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:45.598185Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:45.598197Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:45.598298Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:45.598303Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-09-25T16:18:45.598321Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:45.598325Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-09-25T16:18:46.365633Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-09-25T16:18:46.365662Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 4 2025-09-25T16:18:46.365704Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-09-25T16:18:46.365712Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:46.365721Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-09-25T16:18:46.365727Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:46.365868Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:46.365900Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-09-25T16:18:46.365998Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-09-25T16:18:46.366002Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2025-09-25T16:18:46.366029Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-09-25T16:18:46.366032Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2025-09-25T16:18:46.366095Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-09-25T16:18:46.366112Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-09-25T16:18:46.366124Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-09-25T16:18:46.366128Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 5 (owner) 2025-09-25T16:18:46.366132Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-09-25T16:18:46.366135Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 5 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-09-25T16:18:47.112119Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-09-25T16:18:47.112156Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:47.112173Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-09-25T16:18:47.112181Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:47.112353Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] 2025-09-25T16:18:47.112397Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:290:2099] ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-09-25T16:18:47.112507Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-09-25T16:18:47.112512Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2025-09-25T16:18:47.112523Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-09-25T16:18:47.112526Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 3 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 ... disconnecting nodes 3 <-> 2 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2025-09-25T16:18:47.112598Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-09-25T16:18:47.112602Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-09-25T16:18:47.112615Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-09-25T16:18:47.112619Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.127990s 2025-09-25T16:18:47.112624Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-09-25T16:18:47.112629Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-09-25T16:18:47.112632Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-09-25T16:18:47.112635Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-09-25T16:18:47.113129Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2025-09-25T16:18:47.113179Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:47.117601Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:443:2099] 2025-09-25T16:18:47.123075Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:47.123095Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-09-25T16:18:47.196371Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-09-25T16:18:47.196592Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:443:2099] 2025-09-25T16:18:47.196790Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-09-25T16:18:47.196797Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect >> KqpScheme::FamilyColumnTest [GOOD] >> KqpScheme::Int8Int16 >> KqpTx::CommitPrepared [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TBackupTests::BackupUuidColumn[Zstd] >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] |81.7%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] Test command err: 2025-09-25T16:18:06.578249Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:06.583133Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:06.583241Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:06.583510Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:06.583575Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:06.583757Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:73:2076] ControllerId# 72057594037932033 2025-09-25T16:18:06.583763Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:06.583793Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:06.583824Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:06.587176Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:06.587196Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:06.587606Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587662Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587704Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587744Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587788Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:84:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587816Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:85:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587841Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:72:2075] Create Queue# [2:86:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.587846Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 
2025-09-25T16:18:06.587861Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:73:2076] 2025-09-25T16:18:06.587866Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:73:2076] 2025-09-25T16:18:06.587874Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:06.587882Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:06.588027Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:06.588046Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:06.588575Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:06.588620Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:06.588725Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:06.588771Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:06.588963Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:96:2077] ControllerId# 72057594037932033 2025-09-25T16:18:06.588969Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:06.588983Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:06.589006Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:06.590203Z node 3 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:06.591486Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:06.591499Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:06.591760Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:103:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591795Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:104:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591830Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:105:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591862Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:106:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591905Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: 
Group# 0 Actor# [3:95:2076] Create Queue# [3:107:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591936Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:108:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591968Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:95:2076] Create Queue# [3:109:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.591972Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:06.591981Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:96:2077] 2025-09-25T16:18:06.591985Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:96:2077] 2025-09-25T16:18:06.591990Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:06.591995Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:06.592089Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:06.592113Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:06.593128Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:06.593163Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:06.593270Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:06.593331Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:06.593564Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:06.593575Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:06.593724Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:120:2078] ControllerId# 72057594037932033 2025-09-25T16:18:06.593729Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:06.593745Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:06.593763Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:06.594891Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:06.596480Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy 
received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:06.596490Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:06.596921Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:128:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.596950Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:129:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.596975Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:130:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.597008Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:131:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.597033Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:132:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.597058Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:133:2088] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.597083Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:119:2077] Create Queue# [1:134:2089] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:06.597088Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:06.597108Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_clie ... tatus# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 29 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:18:46.211518Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [667a4d0d3c364da9] Result# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:18:46.211525Z node 31 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [667a4d0d3c364da9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:46.211542Z node 31 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.15 sample PartId# [72057594037927937:2:13:0:0:105:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 31 } TEvVPutResult{ TimestampMs# 0.535 VDiskId# [0:1:0:0:0] NodeId# 31 Status# OK } ] } 2025-09-25T16:18:46.211568Z node 31 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:46.211591Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} commited cookie 1 for step 13 2025-09-25T16:18:46.211651Z node 31 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [31:485:2318] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [31:485:2318] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2025-09-25T16:18:46.211742Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap 
[31:511:2344] 2025-09-25T16:18:46.211747Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [31:511:2344] 2025-09-25T16:18:46.211760Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [31:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:46.211768Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 31 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [31:333:2202] 2025-09-25T16:18:46.211776Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [31:511:2344] 2025-09-25T16:18:46.211782Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [31:511:2344] 2025-09-25T16:18:46.211787Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [31:511:2344] 2025-09-25T16:18:46.211792Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [31:511:2344] 2025-09-25T16:18:46.211802Z node 31 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [31:511:2344] 2025-09-25T16:18:46.211826Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [31:511:2344] 2025-09-25T16:18:46.211830Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [31:511:2344] 2025-09-25T16:18:46.211833Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [31:511:2344] 2025-09-25T16:18:46.211836Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [31:511:2344] 2025-09-25T16:18:46.211839Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [31:511:2344] 2025-09-25T16:18:46.211844Z node 31 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [31:485:2318] EventType# 268697612 2025-09-25T16:18:46.211852Z node 31 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([31:511:2344]) [31:512:2345] 2025-09-25T16:18:46.211879Z node 31 :HIVE TRACE: hive_impl.cpp:795: HIVE#72057594037927937 THive::Handle::TEvTabletMetrics, NodeId 31 TabletMetrics { TabletID: 72075186224037888 ResourceUsage { CPU: 30 } } TabletMetrics { TabletID: 72075186224037889 ResourceUsage { CPU: 800 } } 2025-09-25T16:18:46.211888Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{29, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-09-25T16:18:46.211894Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{29, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:18:46.211907Z node 31 :HIVE TRACE: node_info.cpp:118: HIVE#72057594037927937 Node(31, (90,1048576,0,0)->(30,1048576,0,0)) 2025-09-25T16:18:46.211917Z node 31 :HIVE TRACE: hive_impl.cpp:2638: HIVE#72057594037927937 UpdateTotalResources: ObjectId (72057594037927937,0): {CPU: 90 Memory: 1048576} -> {CPU: 30 Memory: 1048576} 2025-09-25T16:18:46.211924Z node 31 :HIVE TRACE: hive_impl.cpp:2644: 
HIVE#72057594037927937 UpdateTotalResources: Type Dummy: {CPU: 90 Memory: 1048576} -> {CPU: 30 Memory: 1048576} 2025-09-25T16:18:46.211966Z node 31 :HIVE TRACE: tx__update_tablet_metrics.cpp:66: HIVE#72057594037927937 THive::TTxUpdateTabletMetrics UpdateResourceTotalUsage node 31 value (0,0,0,0) accumulated to (0,0,0,0) 2025-09-25T16:18:46.211986Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{29, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{19, redo 675b alter 0b annex 0, ~{ 16, 1, 4 } -{ }, 0 gb} 2025-09-25T16:18:46.211991Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{29, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:18:46.222371Z node 31 :BS_PROXY_PUT INFO: dsproxy_put.cpp:650: [ea40aeb3c4fab1e8] bootstrap ActorId# [31:514:2347] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:14:0:0:329:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-09-25T16:18:46.222423Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ea40aeb3c4fab1e8] Id# [72057594037927937:2:14:0:0:329:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-09-25T16:18:46.222433Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [ea40aeb3c4fab1e8] restore Id# [72057594037927937:2:14:0:0:329:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:18:46.222444Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [ea40aeb3c4fab1e8] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:14:0:0:329:1] Marker# BPG33 2025-09-25T16:18:46.222450Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [ea40aeb3c4fab1e8] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:14:0:0:329:1] Marker# BPG32 2025-09-25T16:18:46.222482Z node 31 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [31:81:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:14:0:0:329:1] FDS# 329 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:18:46.222873Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [ea40aeb3c4fab1e8] received {EvVPutResult Status# OK ID# [72057594037927937:2:14:0:0:329:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 29 } Cost# 82590 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 30 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:18:46.222910Z node 31 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [ea40aeb3c4fab1e8] Result# TEvPutResult {Id# [72057594037927937:2:14:0:0:329:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:18:46.222920Z node 31 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [ea40aeb3c4fab1e8] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:14:0:0:329:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:18:46.222944Z node 31 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.156 sample PartId# [72057594037927937:2:14:0:0:329:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 31 } TEvVPutResult{ TimestampMs# 0.566 VDiskId# [0:1:0:0:0] NodeId# 31 Status# OK } ] } 2025-09-25T16:18:46.222991Z node 31 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:14:0:0:329:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:18:46.223026Z node 31 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} commited cookie 1 for step 14 2025-09-25T16:18:46.223147Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [31:516:2349] 2025-09-25T16:18:46.223154Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [31:516:2349] 2025-09-25T16:18:46.223170Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [31:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:46.223180Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 31 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [31:333:2202] 2025-09-25T16:18:46.223190Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [31:516:2349] 2025-09-25T16:18:46.223199Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [31:516:2349] 2025-09-25T16:18:46.223212Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [31:516:2349] 2025-09-25T16:18:46.223219Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [31:516:2349] 2025-09-25T16:18:46.223232Z node 31 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [31:516:2349] 2025-09-25T16:18:46.223286Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [31:516:2349] 2025-09-25T16:18:46.223290Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [31:516:2349] 2025-09-25T16:18:46.223293Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [31:516:2349] 2025-09-25T16:18:46.223297Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [31:516:2349] 2025-09-25T16:18:46.223300Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [31:516:2349] 2025-09-25T16:18:46.223306Z node 31 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [31:515:2348] EventType# 268830214 2025-09-25T16:18:46.223315Z node 31 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([31:516:2349]) [31:517:2350] |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TestKinesisHttpProxy::DifferentContentTypes >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> KqpScheme::ValidatingUniqIndexSqlSuccess [GOOD] >> KqpScheme::ValidatingUniqIndexSdkSuccess >> KqpTx::EmptyTxOnCommit [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestKinesisHttpProxy::TestPing >> KqpScheme::CreateFamilyWithCompressionLevel [GOOD] >> KqpScheme::CreateResourcePool >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> KqpAcl::AclDml-UseSink-IsOlap [GOOD] >> KqpAcl::AclDml+UseSink-IsOlap ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 9215, MsgBus: 4803 2025-09-25T16:18:46.918099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062036064688733:2139];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:46.918310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056d3/r3tmp/tmp78YVsg/pdisk_1.dat 2025-09-25T16:18:46.950808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:46.961216Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:46.961412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062036064688632:2081] 1758817126917395 != 1758817126917398 TServer::EnableGrpc on GrpcPort 9215, node 1 2025-09-25T16:18:46.971729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:46.971749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:46.971751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:46.971799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4803 TClient is connected to server localhost:4803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:47.025711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.025736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.026704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:47.028864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:47.054010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.072696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.092867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.105628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.243953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.311902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040359657584:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.311931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.312005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040359657594:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.312015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.370406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.382946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.393304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.405529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.419608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.436697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.454641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.469135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.487113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062040359658457:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.487159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.487216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040359658462:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.487228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040359658463:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.487247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.488231Z node 1 :FLAT_TX_SCHEMESHARD WARN ... necting 2025-09-25T16:18:48.096075Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:48.096967Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7554062047404526718:2081] 1758817128074232 != 1758817128074235 2025-09-25T16:18:48.098437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4535, node 2 2025-09-25T16:18:48.105383Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.105397Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.105401Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.105450Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26680 TClient is connected to server localhost:26680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.163785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.165570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.213304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:48.225284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.257840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.274352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.555705Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062047404528351:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.555745Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.556395Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062047404528373:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.556459Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.561591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.573690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.581203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.594461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.610422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.623193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.637279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.652409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.670829Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554062047404529222:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.670881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.670908Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062047404529227:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.671024Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062047404529229:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.671043Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.671645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:48.680773Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554062047404529230:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:18:48.747415Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554062047404529283:3546] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TestKinesisHttpProxy::MissingAction >> KqpScheme::CreateTableWithTtlSettingsUncompat [GOOD] >> KqpScheme::CreateTableWithTtlSettingsCompat >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> KqpScheme::DropNonExistingExternalDataSource [GOOD] >> KqpScheme::DropResourcePool >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TestYmqHttpProxy::TestSendMessage >> KqpScheme::CreateTableWithReadReplicasCompat [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> KqpScheme::AddColumnFamilyWithCompressionLevel [GOOD] >> KqpScheme::AddColumnFamilyWithCacheModeFeatureDisabled+UseQueryService >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TestYmqHttpProxy::TestCreateQueue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 63099, MsgBus: 23113 2025-09-25T16:18:46.997153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062038413859365:2158];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:46.997251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0056d0/r3tmp/tmpLLZYq0/pdisk_1.dat 2025-09-25T16:18:47.038252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.038447Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062038413859231:2081] 1758817126996056 != 1758817126996059 2025-09-25T16:18:47.042033Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63099, node 1 2025-09-25T16:18:47.048791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.048808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.048811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.048882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23113 TClient is connected to server localhost:23113 2025-09-25T16:18:47.099583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-09-25T16:18:47.099623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-09-25T16:18:47.100727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.113989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:47.132941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.158990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:47.187259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.202473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:47.247185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.475202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062042708828170:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.475324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.475795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062042708828180:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.475809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.519105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.527932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.537993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.552347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.566067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.580386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.593894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.607924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.624678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062042708829043:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.624709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.624756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062042708829048:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.624766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062042708829049:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.624772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.625583Z node 1 :FLAT_TX_SCHEMESHARD ... necting 2025-09-25T16:18:48.429462Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:48.429736Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:48.429998Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7554062046654440534:2081] 1758817128414212 != 1758817128414215 TServer::EnableGrpc on GrpcPort 2836, node 2 2025-09-25T16:18:48.438518Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.438534Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.438536Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.438577Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10415 TClient is connected to server localhost:10415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.495922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.549343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.575923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:48.601466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.603581Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.617423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.848907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062046654442170:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.848952Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.849206Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062046654442180:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.849218Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.861217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.875863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.890461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.904000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.917682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.937092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.945840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.959533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.977312Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7554062046654443043:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.977353Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.977405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062046654443048:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.977423Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062046654443049:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.977432Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.978374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:48.987270Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554062046654443052:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:49.051203Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554062050949410400:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> YdbLogStore::AlterLogTable [FAIL] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> KqpScheme::Int8Int16 [GOOD] >> KqpScheme::DropTransfer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CreateSequencesWithIndexedTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:117:2146] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:142:2058] recipient: [1:117:2146] 2025-09-25T16:16:37.080899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:16:37.080935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:37.080942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:16:37.080952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:16:37.080960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:16:37.080966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:16:37.080979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:16:37.080997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:16:37.081149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:16:37.081219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:16:37.104687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:16:37.104729Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:16:37.104815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:198:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.110770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:16:37.110966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:16:37.111009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:16:37.112103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:16:37.112162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:16:37.112272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.112343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:16:37.112860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.112910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:16:37.113182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:16:37.113194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:16:37.113232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:16:37.113241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:16:37.113247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:16:37.113270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:223:2058] recipient: [1:221:2221] Leader for 
TabletID 72057594037968897 is [1:227:2225] sender: [1:228:2058] recipient: [1:221:2221] 2025-09-25T16:16:37.115593Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:141:2162] sender: [1:248:2058] recipient: [1:15:2062] 2025-09-25T16:16:37.144206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:16:37.144304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.144378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:16:37.144390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:16:37.144470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:16:37.144491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:16:37.145454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.145524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:16:37.145598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.145613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:16:37.145619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:16:37.145626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:16:37.146264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.146284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:16:37.146291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 
-> 128 2025-09-25T16:16:37.146752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.146767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:16:37.146775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:16:37.146793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:16:37.147597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:16:37.148478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:16:37.148542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:134:2158] sender: [1:263:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:16:37.148793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:16:37.148849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 Tab ... 
AT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:45.456721Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 2025-09-25T16:18:45.456818Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [286:665:2614], Recipient [286:129:2154]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037968897 Status: OK ServerId: [286:666:2615] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-09-25T16:18:45.456874Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:18:45.456880Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2025-09-25T16:18:45.456908Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:18:45.456923Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [286:377:2353] msg type: 269552133 msg: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 at schemeshard: 72057594046678944 2025-09-25T16:18:45.456932Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:18:45.456938Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:18:45.456962Z node 286 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-09-25T16:18:45.457007Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268698118, Sender [286:223:2221], Recipient [286:129:2154]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 2025-09-25T16:18:45.457013Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5251: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-09-25T16:18:45.457020Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-09-25T16:18:45.457105Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2025-09-25T16:18:45.458028Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435084, Sender [286:129:2154], Recipient [286:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:18:45.458041Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5414: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:18:45.458051Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:18:45.458058Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:18:45.458073Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:18:45.458080Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-09-25T16:18:45.458086Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:18:45.458138Z node 286 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-09-25T16:18:45.458491Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [286:660:2609], Recipient [286:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:45.458500Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:18:45.458504Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 Forgetting tablet 72075186233409548 2025-09-25T16:18:45.459489Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268698118, Sender [286:223:2221], Recipient [286:129:2154]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 2025-09-25T16:18:45.459502Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5251: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-09-25T16:18:45.459512Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:45.459571Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:18:45.459646Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877763, Sender [286:398:2371], Recipient [286:129:2154]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409547 ClientId: [286:398:2371] ServerId: [286:409:2378] } 2025-09-25T16:18:45.459652Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5321: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-09-25T16:18:45.459658Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6152: Client pipe, to tablet: 72075186233409547, from:72057594046678944 is reset 2025-09-25T16:18:45.459779Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, 
received event# 269877763, Sender [286:399:2372], Recipient [286:129:2154]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409548 ClientId: [286:399:2372] ServerId: [286:411:2380] } 2025-09-25T16:18:45.459785Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5321: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-09-25T16:18:45.459789Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6152: Client pipe, to tablet: 72075186233409548, from:72057594046678944 is reset 2025-09-25T16:18:45.460367Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-09-25T16:18:45.460382Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-09-25T16:18:45.460670Z node 286 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-09-25T16:18:45.460693Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:18:45.460699Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:18:45.460716Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:18:45.460758Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-09-25T16:18:45.460767Z node 286 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-09-25T16:18:45.460792Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877763, Sender [286:665:2614], Recipient [286:129:2154]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [286:665:2614] ServerId: [286:666:2615] } 2025-09-25T16:18:45.460798Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5321: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-09-25T16:18:45.460803Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6152: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset 2025-09-25T16:18:45.461134Z node 286 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-09-25T16:18:45.461234Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [286:677:2626], Recipient [286:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:18:45.461240Z node 286 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:18:45.461254Z node 286 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:18:45.461300Z node 286 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 41us result status StatusPathDoesNotExist 2025-09-25T16:18:45.461341Z node 286 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-09-25T16:18:45.672294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:18:45.713526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:18:45.716440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:18:45.716529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:45.716556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002756/r3tmp/tmpCwccvZ/pdisk_1.dat 2025-09-25T16:18:45.778929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:45.778985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:45.792488Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:45.793533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817125209927 != 1758817125209931 2025-09-25T16:18:45.824562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:45.872024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:45.905871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:45.991970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:46.185969Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:346: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-09-25T16:18:46.186238Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:740:2610], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-09-25T16:18:46.208000Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:740:2610], subTag: 1} TUpsertActor finished in 0.021710s, errors=0 2025-09-25T16:18:46.208118Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:425: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-09-25T16:18:46.208141Z node 1 :DS_LOAD_TEST 
NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-09-25T16:18:46.208555Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:366: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-09-25T16:18:46.208594Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:400: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 3} started fullscan actor# [1:752:2622] 2025-09-25T16:18:46.208612Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Bootstrap called, sample# 100 2025-09-25T16:18:46.208617Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Connect to# 72075186224037888 called 2025-09-25T16:18:46.208683Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-09-25T16:18:46.208955Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:749:2619], subTag: 1} finished in 0.000245s, sampled# 100, iter finished# 1, oks# 100 2025-09-25T16:18:46.208993Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:416: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 3} received keyCount# 100 2025-09-25T16:18:46.209049Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:445: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:740:2610], subTag: 3} started# 10 actors each with inflight# 1 2025-09-25T16:18:46.209062Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 2} Bootstrap called 2025-09-25T16:18:46.209067Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209078Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 3} Bootstrap called 2025-09-25T16:18:46.209082Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209089Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 4} Bootstrap called 2025-09-25T16:18:46.209094Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209101Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 5} Bootstrap called 2025-09-25T16:18:46.209105Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209110Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 6} Bootstrap called 2025-09-25T16:18:46.209115Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209123Z node 1 :DS_LOAD_TEST INFO: 
kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 7} Bootstrap called 2025-09-25T16:18:46.209128Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209134Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 8} Bootstrap called 2025-09-25T16:18:46.209138Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209143Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 9} Bootstrap called 2025-09-25T16:18:46.209148Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209154Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 10} Bootstrap called 2025-09-25T16:18:46.209159Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.209164Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 11} Bootstrap called 2025-09-25T16:18:46.209169Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-09-25T16:18:46.210083Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 2} session: ydb://session/3?node_id=1&id=MTFlMmJjYWItNDA1MjU5ZGQtYTJiMmNhMzYtOWI0ZjA1Yjk= 2025-09-25T16:18:46.210169Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 3} session: ydb://session/3?node_id=1&id=NzkyMjEyMGEtZjM1MWFlNjEtYzk4NTllMTQtMzk1ZDRlYTc= 2025-09-25T16:18:46.210515Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 4} session: ydb://session/3?node_id=1&id=MWMyZGRlOS05M2IzNTkwNy00ZWQ0NGFjMi1lYTkzOWU0Ng== 2025-09-25T16:18:46.210860Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 5} session: ydb://session/3?node_id=1&id=YWU0OGZjMWItM2MzZTZlMjYtZjlmZWE5MzUtYzdjYzc1NjI= 2025-09-25T16:18:46.211209Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 6} session: ydb://session/3?node_id=1&id=OWZjNDQ0ODctYjFiZjgyMzUtZTIyNGU0ZmEtYzQxMjZmMWY= 2025-09-25T16:18:46.211672Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 7} session: ydb://session/3?node_id=1&id=ZTIyZDMwMDUtZjdhMWYxMDAtZTlkNDZhM2QtNDVjNjZmZGQ= 2025-09-25T16:18:46.211996Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 8} session: ydb://session/3?node_id=1&id=MzRlYzZiYTYtOTA1YmU2MGUtYjY4NzVkYTctODczMzA4YTA= 2025-09-25T16:18:46.212300Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 9} session: ydb://session/3?node_id=1&id=M2I5ZWM0ZDQtNGRlODQxNGQtOWNlZWIyNDYtZTM4YWRmMGE= 2025-09-25T16:18:46.212606Z node 1 
:DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 10} session: ydb://session/3?node_id=1&id=MjNkZTgzMWUtYTQyY2E2OC05ZDM2NzNmMC0xY2FlYTgwYQ== 2025-09-25T16:18:46.212930Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:749:2619], subTag: 11} session: ydb://session/3?node_id=1&id=MTUxNjkzMTItYmEwNzRhZTctMmMzNTQwYzktNjE5OWQ0ZWY= 2025-09-25T16:18:46.214092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:46.214130Z node 1 :KQP_WORKLOAD_SERVI ... ting right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:48.577803Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:48.673992Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:828:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674024Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674031Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2694], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674038Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2697], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674044Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2698], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674050Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2699], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674057Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:836:2700], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674063Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:837:2701], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674068Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:845:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.674075Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:859:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:48.706164Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:984:2808] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:48.799915Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 11} finished in 0.252424s, errors=0 2025-09-25T16:18:48.800012Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 11 { Tag: 11 DurationMs: 252 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:48.811529Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:2013:3230] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:48.893124Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 8} finished in 0.346615s, errors=0 2025-09-25T16:18:48.893210Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 8 { Tag: 8 DurationMs: 346 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:48.904956Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:3020:3636] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:48.985051Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 9} finished in 0.438239s, errors=0 2025-09-25T16:18:48.985150Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 9 { Tag: 9 DurationMs: 438 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:48.997817Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:4027:4042] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.090325Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 3} finished in 0.545475s, errors=0 2025-09-25T16:18:49.090408Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 3 { Tag: 3 DurationMs: 545 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.102129Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# 
[2:5034:4448] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.197128Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 6} finished in 0.650923s, errors=0 2025-09-25T16:18:49.197211Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 6 { Tag: 6 DurationMs: 650 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.210503Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:6041:4854] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.328956Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 4} finished in 0.783480s, errors=0 2025-09-25T16:18:49.329056Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 4 { Tag: 4 DurationMs: 783 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.341137Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7048:5260] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.466951Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 10} finished in 0.919822s, errors=0 2025-09-25T16:18:49.467020Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 10 { Tag: 10 DurationMs: 919 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.480203Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:8055:5666] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.623182Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 7} finished in 1.076959s, errors=0 2025-09-25T16:18:49.623280Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1076 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.635847Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:9062:6072] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path 
exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.779529Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 2} finished in 1.235115s, errors=0 2025-09-25T16:18:49.779608Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 2 { Tag: 2 DurationMs: 1235 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.792556Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:10069:6478] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:49.949660Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:749:2619], subTag: 5} finished in 1.404166s, errors=0 2025-09-25T16:18:49.949786Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:461: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1404 OperationsOK: 100 OperationsError: 0 } 2025-09-25T16:18:49.949797Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:480: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:740:2610], subTag: 3} finished in 1.406105s, oks# 1000, errors# 0 2025-09-25T16:18:49.949853Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:447: TLoad# 0 received finished from actor# [2:749:2619] with tag# 3 >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestYmqHttpProxy::TestGetQueueUrl >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage >> KqpScheme::QueryWithAlter [GOOD] >> KqpScheme::RenameTable+ColumnTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.721572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.721595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.721601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.721605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.721612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.721617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.721626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.721639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.721753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.721822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.735693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.735722Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.740404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.740512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.740569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.757201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.757273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.757363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.757429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.757793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.757826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.758049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.758057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.758074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.758079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.758084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 
2025-09-25T16:18:49.758118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.759192Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.784866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.784972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.785030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.785039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.785082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.785096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.785889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.785934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.785992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.786002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.786008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.786015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.786512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.786526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.786532Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.786924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.786936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.786942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.786950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.787595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.787921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.787951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.788110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.788129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.788134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.788178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.788184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.788210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.788219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.788566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-09-25T16:18:49.788572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... hard: 72057594046678944 2025-09-25T16:18:49.956968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-09-25T16:18:49.956990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 129 2025-09-25T16:18:49.957025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.958967Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:786: [Export] [s3] Bootstrap: self# [1:422:2391], attempt# 0 2025-09-25T16:18:49.962681Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:442: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:422:2391], sender# [1:421:2390] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:18:49.965077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.965096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] REQUEST: 2025-09-25T16:18:49.965187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.965202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:18:49.965367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.965381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.965633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.965653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.965658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:49.965664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:18:49.965676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:49.965695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, 
TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:18:49.968141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:16372 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 00D0E033-7B8D-4A07-B4B9-691E8C69B690 amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 2025-09-25T16:18:49.969349Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:16372 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A65A0557-88D6-466E-92B1-B120BD9D8ED9 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-09-25T16:18:49.971192Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-09-25T16:18:49.971218Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:421:2390] 2025-09-25T16:18:49.971291Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:422:2391], sender# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16372 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A39C460D-ABFA-4793-8012-7A3768EE76B6 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-09-25T16:18:49.972197Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-09-25T16:18:49.972210Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:422:2391], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.972249Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:49.974891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.974935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:49.974964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.974979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.974996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.975001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.975005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:49.975013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:49.975057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.975704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.975803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.975813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:49.975829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.975834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.975839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.975842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 
2025-09-25T16:18:49.975847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:49.975862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:18:49.975873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.975880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:49.975885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:49.975909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.976366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:49.976378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:407:2377] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.530168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.530199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.530205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.530210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.530218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.530222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.530239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.530255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.530391Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.530476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.546772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.546806Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.551949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.552079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.552143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.553976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.554057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.554180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.554273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.554798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.554848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.555191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.555203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.555227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.555237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.555244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.555287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.557252Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.582874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:18:49.582998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.583057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.583066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.583109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.583125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.583875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.583926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.583986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.583999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.584005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.584011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.584607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.584622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.584631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.585044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.585057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.585064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.585071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.585879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.587806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.587862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.588117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.588164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.588174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.588253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.588264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.588313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.588329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.589296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.589311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
meshard: 72057594046678944 2025-09-25T16:18:49.719737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-09-25T16:18:49.719757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 129 2025-09-25T16:18:49.719784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.722034Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:786: [Export] [s3] Bootstrap: self# [1:422:2391], attempt# 0 2025-09-25T16:18:49.724617Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:442: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:422:2391], sender# [1:421:2390] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:18:49.725707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.725729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:18:49.725818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.725825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:18:49.725947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.725957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.726165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.726180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.726185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:49.726192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:18:49.726200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:49.726217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 
2025-09-25T16:18:49.726938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:8426 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FA997A39-AB38-4582-B122-F3BAD974D6F3 amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 2025-09-25T16:18:49.729457Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:8426 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30FE0676-5F38-4376-AB99-9C10143662DB amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-09-25T16:18:49.733321Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-09-25T16:18:49.733365Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:421:2390] 2025-09-25T16:18:49.733469Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:422:2391], sender# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:8426 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F6D555E5-6ED5-41A3-B58F-02D162D9CA26 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-09-25T16:18:49.744988Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-09-25T16:18:49.745021Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:422:2391], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.745085Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:49.749398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: 
true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:49.749435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:49.749471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:49.749487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:49.749507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.749512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.749518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:49.749527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:49.749577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.750731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.750876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.750888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:49.750926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.750932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.750938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.750941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.750946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:49.750968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:18:49.750977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.750983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:49.750989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:49.751027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.757254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:49.757277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:407:2377] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.611876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.611906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.611912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.611917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.611924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.611929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.611940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.611954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.612082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.612165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.625677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.625702Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.633613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.633725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.633784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.635476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.635547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.635664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.635752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.636257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.636588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.636633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.636640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.636678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.638224Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.659043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.659126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.659177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.659182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.659221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.659234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.659918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.659955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.659997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.660010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.660015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.660476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.660931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.660955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.661612Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.662123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.662176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.662414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.662446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.662457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.662519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.662528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.662564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.662577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.663216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.663227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
-4B0B-B6B9-BA61A22F43FD amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:2473 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5759D760-558D-416B-8CE2-1EDA0011D618 amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 2025-09-25T16:18:49.866345Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } 2025-09-25T16:18:49.866834Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:485:2442], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-09-25T16:18:49.866849Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:485:2442], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.866939Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:484:2440], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:2473 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B4EDA111-9CFD-49DE-B8EF-25991BA33B47 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-09-25T16:18:49.868632Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-09-25T16:18:49.868704Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:482:2439] 2025-09-25T16:18:49.868723Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:483:2441], sender# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-09-25T16:18:49.869305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2473 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E57E70FA-EBB4-4DB3-BC61-1AE6A374B376 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-09-25T16:18:49.869714Z 
node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-09-25T16:18:49.869723Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:483:2441], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.869774Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:49.886498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:49.886550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.886623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.886738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-09-25T16:18:49.886761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 
TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.886781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.886787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.886793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:49.886801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:18:49.886807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:49.886823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.888449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.888532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.888561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.888572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:49.888591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.888597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.888604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.888608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.888614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:49.888634Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2344] message: TxId: 102 2025-09-25T16:18:49.888643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.888649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:49.888655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:49.888693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:49.889337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:49.889350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2419] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.357530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.357559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.357565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.357570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.357577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.357580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.357592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.357604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.357705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.357767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.370748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.370769Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.376155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.376239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.376291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.379528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.379595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.379692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.379760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.380155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.380193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.380482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.380491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.380510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.380515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.380520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.380548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.381717Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.399496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.399610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-09-25T16:18:49.399676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.399685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.399731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.399747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.400608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.400651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.400707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.400715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.400719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.400723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.401202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.401213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.401219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.401553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.401566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.401572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.401580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.402216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.402626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.402662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.402835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.402863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.402871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.402949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.402956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.402994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.403009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.403451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.403459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 S3_MOCK::HttpServeWrite: /metadata.json / / 94 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:27290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 82F2C673-13F4-4187-8E3D-D7A865D7B3BE amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-09-25T16:18:49.595262Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } 2025-09-25T16:18:49.595781Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:485:2442], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-09-25T16:18:49.595793Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:485:2442], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.595814Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:484:2440], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:27290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8B812C47-DAF9-4E6F-BC4B-60BA3C948F7E amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-09-25T16:18:49.596811Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-09-25T16:18:49.597021Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:482:2439] 2025-09-25T16:18:49.597072Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:483:2441], sender# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27290 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 622A4CC2-1A16-456D-AE30-3F8237D19976 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-09-25T16:18:49.597831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:18:49.597870Z 
node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:483:2441], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-09-25T16:18:49.597877Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:483:2441], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.597938Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:482:2439], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:49.624597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:49.624667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 324 RawX2: 4294969603 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.624751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.624940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-09-25T16:18:49.624968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 
TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 330 RawX2: 4294969607 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.624993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.624997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.625003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:49.625008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:18:49.625014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:49.625036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.626251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.626365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.626394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.626403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:49.626419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.626425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.626442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.626446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.626451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:49.626473Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2344] message: TxId: 102 2025-09-25T16:18:49.626485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.626491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:49.626497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:49.626548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:49.627198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:49.627212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2419] TestWaitNotification: OK eventTxId 102 >> KqpScheme::DisableS3ExternalDataSource [GOOD] >> KqpScheme::DoubleCreateExternalDataSource >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.614747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.614771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.614775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.614779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.614784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.614787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.614797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.614809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.614898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.614982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.628706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.628731Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.633613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.633726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.633784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.635505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.635567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.635664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.635750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.636262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.636591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.636624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.636633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.636640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.636679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.639366Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.658182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.658301Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.658369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.658378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.658421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.658441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.659339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.659394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.659459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.659481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.659488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.659494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.660270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.660729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.660746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.660755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-09-25T16:18:49.661627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.662165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.662202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.662413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.662444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.662452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.662516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.662524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.662572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.662584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.663214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.663224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
t schemeshard: 72057594046678944 2025-09-25T16:18:49.802452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-09-25T16:18:49.802478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 129 2025-09-25T16:18:49.802512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.804491Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:786: [Export] [s3] Bootstrap: self# [1:422:2391], attempt# 0 2025-09-25T16:18:49.808795Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:442: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:422:2391], sender# [1:421:2390] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:18:49.810065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.810080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:18:49.810169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.12025-09-25T16:18:49.810177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:18:49.810313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.810325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 HEADERS: Host: localhost:15488 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A75EA728-C234-4C23-8560-2921E4DEB2AA amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 2025-09-25T16:18:49.810796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.810817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:49.810825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:49.810831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, 
txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:18:49.810839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:49.810861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:18:49.810928Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15488 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5FDEACF4-881F-4F24-A131-A65C39E64C3D amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-09-25T16:18:49.812290Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-09-25T16:18:49.812321Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:421:2390] 2025-09-25T16:18:49.812344Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:422:2391], sender# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15488 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CAE1AE4C-2890-457B-9B20-BBEC29C3B451 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-09-25T16:18:49.813454Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-09-25T16:18:49.813469Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:422:2391], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:49.813507Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:49.815353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:18:49.826497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.826527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:49.826559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.826575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:18:49.826594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.826599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.826606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:49.826614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:49.826667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.827221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.827323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.827333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:49.827351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.827358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.827364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:49.827368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.827374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:49.827394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:18:49.827406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:49.827413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:49.827418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:49.827444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:49.827953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:49.827967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:407:2377] TestWaitNotification: OK eventTxId 102 >> KqpScheme::ValidatingUniqIndexSdkSuccess [GOOD] >> KqpScheme::ValidatingUniqIndexSqlFail >> KqpScheme::DropResourcePool [GOOD] >> KqpScheme::DropNonExistingResourcePool >> KqpScheme::CreateResourcePool [GOOD] >> KqpScheme::CreateResourcePoolClassifier >> KqpOlapScheme::AddColumnWithTtl >> KqpScheme::CreateTableWithTtlSettingsCompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUncompat >> KqpAcl::AclDml+UseSink-IsOlap [GOOD] >> KqpAcl::AclDml-UseSink+IsOlap >> KqpScheme::AddColumnFamilyWithCacheModeFeatureDisabled+UseQueryService [GOOD] >> KqpScheme::AddColumnFamilyWithCacheModeFeatureDisabled-UseQueryService >> KqpOlapScheme::ColumnFamilyWithFieldData >> KqpScheme::UseUnauthorizedTable >> KqpScheme::DropTransfer [GOOD] >> KqpScheme::DropTransfer_QueryService |81.7%| [TA] $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::DoubleCreateExternalDataSource [GOOD] >> KqpScheme::DoubleCreateExternalTable >> KqpScheme::AlterTableAlterVectorIndex >> KqpScheme::DisableExternalDataSourcesOnServerless >> KqpConstraints::AddSerialColumnForbidden >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> KqpScheme::AlterTableWithPgColumn >> KqpScheme::CreateTableWithWrongPartitionAtKeys >> KqpScheme::CreateTableWithUniformPartitionsUncompat [GOOD] >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat >> KqpScheme::ValidatingUniqIndexSqlFail [GOOD] >> KqpScheme::ValidatingUniqIndexSdkFail >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead >> KqpConstraints::DropCreateSerial >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> KqpScheme::DropNonExistingResourcePool [GOOD] >> KqpOlapScheme::ColumnFamilyWithFieldData [GOOD] >> KqpOlapScheme::AlterCompressionType >> KqpScheme::DropResourcePoolClassifier |81.7%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> KqpScheme::AddColumnFamilyWithCacheModeFeatureDisabled-UseQueryService [GOOD] >> KqpScheme::AddDropColumn >> TestKinesisHttpProxy::GoodRequestPutRecords >> TestKinesisHttpProxy::TestRequestBadJson ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-09-25T16:18:21.872906Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061928757358574:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.872972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419b/r3tmp/tmp0bJ5A0/pdisk_1.dat 2025-09-25T16:18:21.979140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.983219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.983244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.989803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:21.999802Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5138, node 1 2025-09-25T16:18:22.015529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:22.015542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:22.015544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:22.015576Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:22.054018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:22.154876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:22.368895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061933052326827:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.368926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.369012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554061933052326837:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.369025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:22.409273Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554061933052326851:2625] txid# 281474976715658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-09-25T16:18:23.204661Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061937099508225:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.204803Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419b/r3tmp/tmpB6vpQ5/pdisk_1.dat 2025-09-25T16:18:23.212297Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.229552Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2576, node 4 2025-09-25T16:18:23.260562Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.260575Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.260577Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.260613Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-09-25T16:18:23.284705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:23.303306Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.303333Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.305323Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.471043Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.623337Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061937099509120:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.623362Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.626219Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061937099509149:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.627360Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.631851Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554061937099509167:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.631878Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:23.633425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:23.637790Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061937099509193:2635] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } 2025-09-25T16:18:23.637838Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7554061937099509194:2636] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], ... nknown, path: Root/.metadata/script_executions 2025-09-25T16:18:25.100399Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:25.124720Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061949706317787:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.124720Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061949706317782:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.124751Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.124834Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554061949706317797:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.124853Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:25.125620Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:25.130572Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554061949706317796:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:25.191819Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554061949706317869:2823] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:25.202880Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tpd744etgngdh26tmsjnq, Database: , SessionId: ydb://session/3?node_id=7&id=MmJlYWViNjEtYzQzMTQxMTQtZDU0MGIzYjktNjQ4MGU2MWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:26.043373Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7554061952926210572:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:26.043394Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:26.082300Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00419b/r3tmp/tmpRPhUQG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15150, node 10 2025-09-25T16:18:26.131560Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:26.131573Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:26.131575Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:26.131642Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:26.131855Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:26.145440Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:26.145475Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:26.147073Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:26.176805Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:26.277147Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:26.567197Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:27.046396Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:18:31.043619Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7554061952926210572:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:31.043661Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-09-25T16:18:41.101859Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7721: Cannot get console configs 2025-09-25T16:18:41.101876Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:51.694932Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062060300395167:2517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.694968Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.694983Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062060300395179:2520], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.695642Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062060300395182:2522], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.695659Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.696099Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:51.702372Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554062060300395181:2521], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:18:51.784842Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554062060300395266:3181] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:51.804198Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k60tq75ddyg7qyxywt1xmn2b, Database: , SessionId: ydb://session/3?node_id=10&id=MmQxMmU0Mi1jZTEzYThmYy0xYmU3ZDU5My1hNzU0NGJjYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-09-25T16:18:51.865224Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k60tq7903zgzy2zrqxysd43t, Database: , SessionId: ydb://session/3?node_id=10&id=MmQxMmU0Mi1jZTEzYThmYy0xYmU3ZDU5My1hNzU0NGJjYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> KqpScheme::UseUnauthorizedTable [GOOD] >> KqpScheme::UseNonexistentTable >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestYmqHttpProxy::TestReceiveMessage >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> KqpScheme::RenameTable+ColumnTable [GOOD] >> KqpScheme::RenameTable-ColumnTable >> KqpConstraints::AddSerialColumnForbidden [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultValue >> KqpOlapScheme::AlterCompressionType [GOOD] >> KqpOlapScheme::AlterCompressionLevelError |81.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> KqpScheme::DoubleCreateExternalTable [GOOD] >> KqpScheme::DoubleCreateResourcePool >> KqpConstraints::DropCreateSerial [GOOD] >> KqpConstraints::DefaultsAndDeleteAndUpdate >> KqpScheme::AlterTableAlterVectorIndex [GOOD] >> KqpScheme::AlterTableAlterMissedIndex >> KqpScheme::DropTransfer_QueryService [GOOD] >> KqpScheme::DropStreamingQueryBasic >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> KqpScheme::CreateTableWithWrongPartitionAtKeys [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> KqpScheme::CreateTableWithVectorIndexCovered >> KqpScheme::CreateResourcePoolClassifier [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUuid >> KqpScheme::ValidatingUniqIndexSdkFail [GOOD] >> KqpSchemeFulltext::AlterTableWithIndex >> KqpAcl::AclDml-UseSink+IsOlap [GOOD] >> KqpAcl::AclDml+UseSink+IsOlap >> TestYmqHttpProxy::TestSendMessageFifoQueue >> KqpScheme::AlterTableWithPgColumn [GOOD] >> KqpScheme::AlterUser >> KqpOlapScheme::AddColumnWithTtl [GOOD] >> KqpOlapScheme::AddColumnSimpleReader >> KqpOlapScheme::AlterCompressionLevelError [GOOD] >> KqpOlapScheme::CreateTableStoreWithFamily >> KqpScheme::AddDropColumn [GOOD] >> KqpScheme::AddChangefeed >> KqpScheme::TouchIndexAfterMoveIndexRead 
[GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite >> KqpScheme::AlterIndexImplTable+VectorIndex |81.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::UseNonexistentTable [GOOD] >> KqpScheme::UseDroppedTable >> KqpConstraints::DefaultsAndDeleteAndUpdate [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 >> KqpConstraints::AlterTableAddColumnWithDefaultValue [GOOD] >> KqpConstraints::DefaultValuesForTable >> KqpScheme::DisableExternalDataSourcesOnServerless [GOOD] >> KqpScheme::DisableDropExternalDataSource >> KqpScheme::RenameTable-ColumnTable [GOOD] >> KqpScheme::RenameTableWithVectorIndex |81.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::DropIndexDataColumn >> KqpScheme::DoubleCreateResourcePool [GOOD] >> KqpScheme::DoubleCreateResourcePoolClassifier+UseSink >> KqpScheme::DropResourcePoolClassifier [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier >> KqpOlapScheme::CreateTableStoreWithFamily [GOOD] >> KqpOlapScheme::CreateTableWithDefaultFamilyWithoutSettings >> KqpScheme::AlterTableAlterMissedIndex [GOOD] >> KqpScheme::AlterTableRenameIndex >> KqpScheme::CreateTableWithVectorIndexCovered [GOOD] >> KqpScheme::CreateTableWithVectorIndexCaseIncentive |81.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateTableWithTtlOnIntColumn [GOOD] >> KqpScheme::AlterUser [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column >> KqpScheme::AsyncReplicationCommitInterval+UseQueryService |81.7%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> KqpConstraints::DefaultValuesForTable [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 [GOOD] >> KqpConstraints::DefaultAndIndexesTestDefaultColumnNotIncludedInIndex >> KqpConstraints::DefaultValuesForTableNegative3 >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUuid [GOOD] >> KqpScheme::CreateTableWithUniqConstraint >> KqpOlapScheme::AddColumnSimpleReader [GOOD] >> KqpOlapScheme::AddColumnWithStore |81.7%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScheme::DropStreamingQueryBasic [GOOD] >> KqpScheme::DropStreamingQueryErrors >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] >> TestKinesisHttpProxy::DoubleCreateStream >> TestKinesisHttpProxy::TestConsumersEmptyNames ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-09-25T16:18:23.913861Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061941110605072:2148];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.913882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00418b/r3tmp/tmpQkNAs1/pdisk_1.dat 2025-09-25T16:18:24.057010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:24.063855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.063880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.070009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:24.082131Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3890, node 1 2025-09-25T16:18:24.101058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.101074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.101077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.101123Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:24.129676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:24.188468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:43344" , at schemeshard: 72057594046644480 2025-09-25T16:18:24.188583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_store.cpp:332: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:24.188738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-09-25T16:18:24.188752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-09-25T16:18:24.188756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976715658:0 type: TxCreateOlapStore target path: [OwnerId: 72057594046644480, LocalPathId: 2] source path: 2025-09-25T16:18:24.188766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-09-25T16:18:24.188772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-09-25T16:18:24.188775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-09-25T16:18:24.188780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-09-25T16:18:24.188859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-09-25T16:18:24.189228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: 
Change state for txid 281474976715658:0 1 -> 2 2025-09-25T16:18:24.189310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:24.189319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-09-25T16:18:24.189351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-09-25T16:18:24.189358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-09-25T16:18:24.191802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-09-25T16:18:24.191853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-09-25T16:18:24.191914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-09-25T16:18:24.191917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-09-25T16:18:24.191964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-09-25T16:18:24.191993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-09-25T16:18:24.191997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554061945405572856:2380], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-09-25T16:18:24.192000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554061945405572856:2380], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-09-25T16:18:24.192010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:24.192018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-09-25T16:18:24.192169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 
72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolN ... t schemeshard: 72057594046644480 2025-09-25T16:18:48.374348Z node 64 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-09-25T16:18:48.374355Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-09-25T16:18:48.374380Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-09-25T16:18:48.374524Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-09-25T16:18:48.374562Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-09-25T16:18:48.374565Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-09-25T16:18:48.374574Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-09-25T16:18:48.374612Z node 64 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-09-25T16:18:48.374628Z node 64 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 
2025-09-25T16:18:48.374632Z node 64 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-09-25T16:18:48.374766Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-09-25T16:18:48.374771Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-09-25T16:18:48.375701Z node 64 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[64:7554062048126446928:2310];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:981;event=tablet_die; 2025-09-25T16:18:48.376796Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-09-25T16:18:48.376800Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-09-25T16:18:48.376809Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-09-25T16:18:48.376811Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-09-25T16:18:48.376814Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-09-25T16:18:48.376817Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-09-25T16:18:48.377042Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:18:48.377392Z node 64 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[64:7554062048126446931:2311];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:981;event=tablet_die; 2025-09-25T16:18:48.378923Z node 64 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[64:7554062048126446943:2312];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:981;event=tablet_die; 2025-09-25T16:18:48.380116Z node 64 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[64:7554062048126446927:2309];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:981;event=tablet_die; 2025-09-25T16:18:49.294929Z node 67 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7554062051916766053:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:49.294976Z node 67 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:49.333338Z node 67 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00418b/r3tmp/tmpqLdAqA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64326, node 67 2025-09-25T16:18:49.373238Z node 67 :IMPORT WARN: schemeshard_import.cpp:307: Table 
profiles were not loaded 2025-09-25T16:18:49.373548Z node 67 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:49.373562Z node 67 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:49.373564Z node 67 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:49.373634Z node 67 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:49.398776Z node 67 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:49.398804Z node 67 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:49.400250Z node 67 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:49.435918Z node 67 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:49.441108Z node 67 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:49.460144Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:55748" , at schemeshard: 72057594046644480 2025-09-25T16:18:49.460247Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: create_store.cpp:332: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:49.460255Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-09-25T16:18:49.462647Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-09-25T16:18:49.462741Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-09-25T16:18:49.462791Z node 67 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [67:7554062051916766945:2604] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } 2025-09-25T16:18:49.585821Z node 67 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:437, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x175C9ABC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+128 (0x17786600) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+2776 (0x1743A4D8) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x1744B947) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0x177887D8) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x1744B2ED) NUnitTest::TTestFactory::Execute()+817 (0x17788F21) NUnitTest::RunMain(int, char**)+3181 (0x1779B37D) ??+0 (0x7F06C82FBD90) __libc_start_main+128 (0x7F06C82FBE40) _start+41 (0x160AA029) >> KqpScheme::AlterIndexImplTable+VectorIndex [GOOD] >> KqpScheme::AlterIndexImplTable-VectorIndex >> KqpScheme::DisableDropExternalDataSource [GOOD] >> KqpScheme::UseDroppedTable [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite >> KqpScheme::DisableDropExternalTable >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> KqpScheme::RenameTableWithVectorIndex [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexReadReplace >> KqpScheme::ResourcePoolsValidation >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> KqpOlapScheme::CreateTableWithDefaultFamilyWithoutSettings [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat >> KqpConstraints::DefaultValuesForTableNegative4 >> KqpOlapScheme::CreateTableNonDefaultFamilyWithoutCompression >> KqpScheme::AsyncReplicationCommitInterval+UseQueryService [GOOD] >> KqpScheme::DropIndexDataColumn [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> KqpConstraints::DefaultAndIndexesTestDefaultColumnNotIncludedInIndex [GOOD] >> KqpScheme::AsyncReplicationCommitInterval-UseQueryService >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag >> KqpScheme::DropChangefeedNegative >> KqpConstraints::AlterTableAddNotNullWithDefault >> TestYmqHttpProxy::TestSendMessageWithAttributes >> KqpScheme::CreateTableWithUniqConstraint [GOOD] >> KqpScheme::AlterTableRenameIndex [GOOD] >> KqpAcl::AclDml+UseSink+IsOlap [GOOD] >> KqpAcl::FailNavigate >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> KqpAcl::AclCreateTableAs-IsOlap-UseAdmin >> KqpScheme::AlterTableReplaceIndex >> KqpConstraints::DefaultValuesForTableNegative4 [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] >> KqpOlapScheme::AddColumnWithStore [GOOD] >> TBackupTests::BackupUuidColumn[Raw] >> KqpOlapScheme::CreateTableNonDefaultFamilyWithoutCompression [GOOD] >> KqpScheme::DisableDropExternalTable [GOOD] >> KqpScheme::DropSecret+UseQueryService >> KqpScheme::DoubleCreateResourcePoolClassifier+UseSink 
[GOOD] >> KqpOlapScheme::CreateTableWithCacheModeError >> KqpConstraints::IndexedTableAndNotNullColumn >> KqpOlapScheme::AddPgColumnWithStore >> KqpScheme::DisableResourcePools >> KqpScheme::DoubleCreateResourcePoolClassifier-UseSink >> KqpScheme::TouchIndexAfterMoveIndexReadReplace [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] >> KqpScheme::CreateStreamingQueryBasic >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] >> YdbOlapStore::ManyTables [GOOD] >> KqpScheme::ResourcePoolsValidation [GOOD] >> KqpScheme::AlterIndexImplTable-VectorIndex [GOOD] >> KqpSchemeFulltext::AlterTableWithIndex [FAIL] >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace >> KqpScheme::AlterIndexImplTableUsingPublicAPI >> KqpScheme::ResourcePoolClassifiersValidation >> YdbOlapStore::LogPagingBetween >> KqpScheme::UniqueIndexMultipleNulls >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag >> KqpScheme::DropStreamingQueryErrors [GOOD] >> KqpScheme::DropSecret-UseQueryService |81.7%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::AsyncReplicationCommitInterval-UseQueryService [GOOD] >> KqpScheme::AsyncReplicationConnectionString |81.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> KqpScheme::DropChangefeedNegative [GOOD] >> KqpScheme::DropExternalDataSource >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysUuid >> KqpScheme::DropSecret+UseQueryService [GOOD] >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag [GOOD] >> KqpScheme::CreateTableWithVectorIndexPublicApi >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> KqpOlapScheme::CreateTableWithCacheModeError [GOOD] >> KqpOlapScheme::AlterTableWithCacheModeError >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> KqpAcl::AclCreateTableAs-IsOlap-UseAdmin [GOOD] >> KqpAcl::AclCreateTableAs+IsOlap-UseAdmin >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> KqpScheme::CreateTableWithVectorIndex >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> KqpAcl::FailNavigate [GOOD] >> KqpAcl::AlterDatabasePrivilegesRequiredToChangeSchemeLimits+AsClusterAdmin >> TestKinesisHttpProxy::TestListStreamConsumers >> KqpScheme::DisableResourcePools [GOOD] >> KqpScheme::DisableResourcePoolsOnServerless >> KqpScheme::DropSecret-UseQueryService [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords >> KqpConstraints::IndexedTableAndNotNullColumn [GOOD] >> KqpConstraints::IndexAutoChooseAndNonReadyIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropSecret+UseQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28061, MsgBus: 2127 2025-09-25T16:18:47.317152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062040849004911:2258];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.317242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f29/r3tmp/tmpf7EO7j/pdisk_1.dat 2025-09-25T16:18:47.373559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.375706Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:47.379734Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062040849004664:2081] 1758817127313374 != 1758817127313377 TServer::EnableGrpc on GrpcPort 28061, node 1 2025-09-25T16:18:47.393017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.393027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.393029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.393067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2127 2025-09-25T16:18:47.417035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.417067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.418104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.464624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:47.468268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:47.486172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.507870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.534100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.547025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.643093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.774136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040849006322:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.774169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.774254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040849006332:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.774267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.834125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.841681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.852892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.866796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.880386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.894484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.908340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.922549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.939775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062040849007193:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.939814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040849007198:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.939822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.939873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062040849007201:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.939886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... e/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:56.584959Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:56.700143Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:56.830456Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:56.946357Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Trying to start YDB, gRPC: 30063, MsgBus: 16725 2025-09-25T16:18:57.683435Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7554062086094328981:2091];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:57.685422Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f29/r3tmp/tmpbedQGZ/pdisk_1.dat 2025-09-25T16:18:57.704705Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:57.704732Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:57.706537Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30063, node 7 2025-09-25T16:18:57.708994Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:57.714131Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:57.714141Z node 7 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:57.714143Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:57.714182Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:57.714479Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:16725 TClient is connected to server localhost:16725 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:57.783245Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:57.784583Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:57.960413Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:58.181238Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062090389296880:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.181257Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062090389296889:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.181263Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.182249Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:58.184742Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062090389296895:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.184907Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.185416Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-09-25T16:18:58.185477Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062090389296894:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:18:58.260419Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062090389296947:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:58.265028Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:18:58.265949Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-09-25T16:18:58.271759Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [7:7554062090389296993:2333], status: GENERIC_ERROR, issues:
:2:48: Error: mismatched input 'WITH' expecting {, ';'} 2025-09-25T16:18:58.271847Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=7&id=ZGZlMGQ3YmYtNWM3MGZkYi1hZjEwMDQzLWZjOWNmMTFj, ActorId: [7:7554062090389296986:2329], ActorState: ExecuteState, TraceId: 01k60tqdjx3kfaw7zfk6t46m78, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:18:58.276516Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062090389297003:2367] txid# 281474976710661, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:18:58.276658Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=ZTkzNmZkZmUtOTI4NDE5MGYtNzQ5MTUxZjItYzVhMzdiZDM=, ActorId: [7:7554062090389296997:2335], ActorState: ExecuteState, TraceId: 01k60tqdk14hk8xee5w16vg8ak, Create QueryResponse for error on request, msg: TClient::Ls request: /Root/secret-name TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "secret-name" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1758817138309 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } Pa... 
(TRUNCATED) 2025-09-25T16:18:58.297333Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 TClient::Ls request: /Root/secret-name TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> KqpScheme::AlterTableReplaceIndex [GOOD] >> KqpScheme::AlterTableVectorIndexInvalidSettingsPublicApi >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] |81.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpOlapScheme::AlterTableWithCacheModeError [GOOD] |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag [GOOD] >> KqpSchemeFulltext::AlterTableWithIndexInvalidSettings >> TestKinesisHttpProxy::ListShards >> KqpScheme::ResourcePoolClassifiersValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveTableRead >> KqpScheme::AsyncReplicationConnectionString [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:57.852695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:57.852719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:57.852723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:57.852728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:57.852733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-09-25T16:18:57.852737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:57.852752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:57.852765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:57.852884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:57.852953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:57.865344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:57.865370Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:57.869067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:57.869171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:57.869219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:57.870814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:57.870890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:57.871048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:57.871135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:57.871611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:57.871656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:57.871923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:57.871931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:57.871950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:57.871959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:57.871965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:57.872003Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.873265Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:57.889560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:57.889660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.889728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:57.889737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:57.889780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:57.889796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:57.891088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:57.891140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:57.891210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.891222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:57.891228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:57.891234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:57.891733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.891745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:57.891750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-09-25T16:18:57.892049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.892059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:57.892065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:57.892073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:57.892591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:57.892947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:57.892984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:57.893179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:57.959007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:57.959060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:57.959164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:57.959176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:57.959233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:57.959258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:57.962847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:57.962871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme ... at schemeshard: 72057594046678944 2025-09-25T16:18:58.603431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-09-25T16:18:58.603452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 129 2025-09-25T16:18:58.603481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:58.605432Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:786: [Export] [s3] Bootstrap: self# [1:422:2391], attempt# 0 2025-09-25T16:18:58.615558Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:442: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:422:2391], sender# [1:421:2390] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3804 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2F1071CE-5F89-4E65-91C4-65DF00B4073D amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 2025-09-25T16:18:58.618248Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } 2025-09-25T16:18:58.619038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:58.619057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:18:58.619161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:58.619169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:18:58.619378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:58.619391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:58.619646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:58.692961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:18:58.692996Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:18:58.693008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:18:58.693018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:18:58.693052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3804 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1ED4D248-ABB8-4D74-9993-733B94C869EF amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-09-25T16:18:58.696690Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-09-25T16:18:58.696766Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:421:2390] 2025-09-25T16:18:58.696868Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:422:2391], sender# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:3804 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A22169B0-AC3E-4FC9-9723-9F903F61627B amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-09-25T16:18:58.698387Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:422:2391], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-09-25T16:18:58.698413Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:422:2391], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:18:58.699658Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:421:2390], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:18:58.713212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:18:58.725367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 
2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:58.725407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:18:58.725442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:58.725457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:18:58.725477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:58.725482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:58.725488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:18:58.725496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:18:58.725553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:58.726291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:58.726408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:18:58.726418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:18:58.726434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:58.726439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:58.726445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:18:58.726448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:58.726453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:18:58.726470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:18:58.790112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:18:58.790156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:18:58.790164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:18:58.790230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:18:58.793794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:18:58.793825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:407:2377] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropSecret-UseQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19283, MsgBus: 9742 2025-09-25T16:18:48.064947Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062046433260075:2148];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.064983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:48.069352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001efa/r3tmp/tmp2RZdSu/pdisk_1.dat 2025-09-25T16:18:48.097025Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19283, node 1 2025-09-25T16:18:48.116914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.116928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.116930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.116976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9742 TClient is connected to server localhost:9742 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:18:48.167912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.167937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.169085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:48.177895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.187791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.190339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:48.219955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.246588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.268695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
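The KQP_WORKLOAD_SERVICE warnings that follow ("Resource pool default not found or you don't have access permissions") are emitted while the test database is still bootstrapping: the pool fetcher looks up the built-in default pool under /Root/.metadata/workload_manager/pools/default before it has been auto-created, and elsewhere in this log a TX_PROXY message ("path exist, request accepts it") shows the pool does appear shortly afterwards. As a minimal sketch only, assuming the documented workload-manager YQL syntax and using an illustrative pool name and limits that are not taken from this test, such a pool can also be declared explicitly:

CREATE RESOURCE POOL my_pool WITH (   -- my_pool and the limits below are hypothetical, for illustration only
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);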
2025-09-25T16:18:48.281111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.458251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046433261594:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.458283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.458451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046433261604:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.458459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.519136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.527514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.538617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.555711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.569422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.581437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.594739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.613383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.639431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062046433262466:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.639461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.639462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046433262471:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.639509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046433262473:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.639516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.640278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: ... 99] failed BAD_REQUEST, issues: {
: Error: Path /Root/MyFolder/MyTable exists, but it is not a streaming query: KindTable } 2025-09-25T16:18:57.775307Z node 6 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [6:7554062087192213658:4098] QueryPath: /Root/MyFolder/MyTable. Failed BAD_REQUEST, with issues: {
: Error: Describe streaming query failed subissue: {
: Error: Path /Root/MyFolder/MyTable exists, but it is not a streaming query: KindTable } } 2025-09-25T16:18:57.775452Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=6&id=NmY2YTE3ZTYtMjAzMTFjMzQtODVjOGFhODQtODAxMjI0ZDQ=, ActorId: [6:7554062087192213653:2664], ActorState: ExecuteState, TraceId: 01k60tqd394zky1mpejf4kdftc, Create QueryResponse for error on request, msg: 2025-09-25T16:18:57.782520Z node 6 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TDescribeStreamingQuerySchemeActor] OwnerId: [6:7554062087192213672:4104] ActorId: [6:7554062087192213673:4105] QueryPath: /Root/MyFolder/MyTable. Failed BAD_REQUEST, with issues: {
: Error: Path /Root/MyFolder/MyTable exists, but it is not a streaming query: KindTable } 2025-09-25T16:18:57.782559Z node 6 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [6:7554062087192213672:4104] QueryPath: /Root/MyFolder/MyTable. Describe streaming query [6:7554062087192213673:4105] failed BAD_REQUEST, issues: {
: Error: Path /Root/MyFolder/MyTable exists, but it is not a streaming query: KindTable } 2025-09-25T16:18:57.782570Z node 6 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [6:7554062087192213672:4104] QueryPath: /Root/MyFolder/MyTable. Failed BAD_REQUEST, with issues: {
: Error: Describe streaming query failed subissue: {
: Error: Path /Root/MyFolder/MyTable exists, but it is not a streaming query: KindTable } } 2025-09-25T16:18:57.782941Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=6&id=ODZhZDViMzAtZDgzMGI4NzQtY2E1MGZlZjUtNjA5MzYzZDI=, ActorId: [6:7554062087192213667:2667], ActorState: ExecuteState, TraceId: 01k60tqd3g0a8ta8bpfa39zpne, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29323, MsgBus: 30773 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001efa/r3tmp/tmpWarwGh/pdisk_1.dat 2025-09-25T16:18:58.270692Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:58.270721Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:58.271076Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:58.271125Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:58.271979Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:58.276350Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29323, node 7 2025-09-25T16:18:58.286753Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:58.286766Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:58.286769Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:58.286834Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30773 TClient is connected to server localhost:30773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
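The node 7 output below walks the secret lifecycle that KqpScheme::DropSecret exercises: an ESchemeOpCreateSecret suboperation creates /Root/secret-name, TClient::Ls then reports it with PathType: EPathTypeSecret and SecretVersion: 1, and after the drop the same Ls returns PATH_NOT_EXIST with "Path not found". As a hedged illustration only, the object-style YQL syntax for the same lifecycle looks roughly like this; my_secret and my_value are made-up, the exact WITH clause may differ between versions, and the test itself may use a newer native CREATE/DROP SECRET statement rather than this form:

CREATE OBJECT my_secret (TYPE SECRET) WITH value="my_value";   -- hypothetical name/value; creates a secret entry in the scheme
DROP OBJECT my_secret (TYPE SECRET);                           -- after this, describing the path reports "Path not found"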
2025-09-25T16:18:58.373610Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:58.377219Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:18:58.524552Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:58.809094Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277693:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.809119Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.809182Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:18:58.809215Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277709:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.809220Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.810710Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-09-25T16:18:58.818105Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277723:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.818155Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.820659Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277727:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.820705Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.821105Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062089855277732:2323] txid# 281474976710659, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } TClient::Ls request: /Root/secret-name TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "secret-name" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1758817138855 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } Pa... (TRUNCATED) 2025-09-25T16:18:58.826731Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277753:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.826764Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.826932Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062089855277765:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.826942Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } TClient::Ls request: /Root/secret-name TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" >> KqpScheme::DropExternalDataSource [GOOD] >> KqpScheme::DropExternalTable >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> KqpScheme::CreateTableWithPartitionAtKeysUuid [GOOD] >> KqpScheme::CreateTableWithStoreExternalBlobs >> KqpScheme::UniqueIndexMultipleNulls [GOOD] >> KqpScheme::UnknownFamilyTest >> TestKinesisHttpProxy::CreateDeleteStream >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> KqpScheme::CreateTableWithVectorIndexPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestYmqHttpProxy::TestGetQueueAttributes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AlterTableWithCacheModeError [GOOD] Test command err: Trying to start YDB, gRPC: 12633, MsgBus: 5815 2025-09-25T16:18:51.726972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062057817123931:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:51.727138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:51.729716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e62/r3tmp/tmpDHdbWc/pdisk_1.dat 2025-09-25T16:18:51.773876Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12633, node 1 2025-09-25T16:18:51.784125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:51.784148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:51.784151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:51.784203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5815 TClient is connected to server localhost:5815 WaitRootIsUp 'Root'... 
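The CREATE TABLE statements further down in the KqpOlapScheme::AlterTableWithCacheModeError output declare column families (FAMILY default / family1 with COMPRESSION settings) on a column-store table (STORE = COLUMN). As a minimal sketch only, assuming the row-table column-family syntax documented for YQL and using hypothetical table and column names, families can also be added and assigned after table creation; whether every clause below applies to STORE = COLUMN tables is not shown by this log:

ALTER TABLE `/Root/SomeRowTable` ADD FAMILY family_small (DATA = "ssd", COMPRESSION = "off");   -- hypothetical table
ALTER TABLE `/Root/SomeRowTable` ALTER COLUMN Value1 SET FAMILY family_small;                   -- hypothetical column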
TClient::Ls request: Root 2025-09-25T16:18:51.829090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:51.829117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:51.830130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:51.841413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:51.851810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (DATA="test", COMPRESSION="off")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-09-25T16:18:52.235488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091850:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.235541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.237273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091860:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.237302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (DATA="test", COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-09-25T16:18:52.301997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091874:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.302062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-09-25T16:18:52.305274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091879:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.305314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.310246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091884:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.310311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.312251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-09-25T16:18:52.314197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062062112091890:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.314330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.328239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:52.328307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:52.328366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:52.328391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:52.328412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:52.328440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:52.328461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:52.328483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:52.328506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:52.328533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:52.328554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:52.328575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062062112091933:2332];ta ... 5-09-25T16:18:59.311593Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.321825Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:59.321908Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:59.321981Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:59.322013Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:59.322037Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:59.322078Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:59.322098Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:59.322114Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:59.322128Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:59.322147Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:59.322168Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:59.322186Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:18:59.322207Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:18:59.328165Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:18:59.328181Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:18:59.328193Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:18:59.328198Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:18:59.328222Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:18:59.328228Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:18:59.328246Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:18:59.328254Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:18:59.328263Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:18:59.328270Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:18:59.328279Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:18:59.328286Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:18:59.328318Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:18:59.328327Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:18:59.328348Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:18:59.328356Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:18:59.328365Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:18:59.328372Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:18:59.328379Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:18:59.328388Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:18:59.328404Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:18:59.328412Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-09-25T16:18:59.328425Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-09-25T16:18:59.328431Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-09-25T16:18:59.329993Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062094962497917:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=126370713296336;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1758817139329;max=18446744073709551615;plan=0;src=[8:7554062090667530264:2162];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:18:59.333180Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:18:59.333201Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:18:59.333204Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:18:59.339641Z node 8 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062094962497985:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.339662Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.339841Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062094962497989:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.339877Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScheme::CreateTableWithVectorIndex [GOOD] >> KqpScheme::CreateStreamingQueryBasic [GOOD] >> KqpScheme::CreateStreamingQueryErrors >> KqpScheme::DoubleCreateResourcePoolClassifier-UseSink [GOOD] >> KqpScheme::DisableStreamingQueries >> KqpScheme::AlterIndexImplTableUsingPublicAPI [GOOD] >> KqpScheme::AlterDatabaseChangeSchemeLimits+EnableAlterDatabase >> KqpAcl::AlterDatabasePrivilegesRequiredToChangeSchemeLimits+AsClusterAdmin [GOOD] >> KqpAcl::AlterDatabasePrivilegesRequiredToChangeSchemeLimits-AsClusterAdmin >> TestYmqHttpProxy::TestSetQueueAttributes >> KqpScheme::CreateAsyncReplicationWithTokenSecret >> KqpScheme::AlterTableVectorIndexInvalidSettingsPublicApi [GOOD] >> KqpScheme::AlterTableVectorIndexInvalidSettingsPositions >> KqpAcl::AclCreateTableAs+IsOlap-UseAdmin [GOOD] >> KqpAcl::AclCreateTableAs-IsOlap+UseAdmin >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultRejection >> KqpSchemeFulltext::AlterTableWithIndexInvalidSettings [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndex [GOOD] Test command err: Trying to start YDB, gRPC: 24394, MsgBus: 23217 2025-09-25T16:18:48.419509Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062046538054585:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.419528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e8a/r3tmp/tmp9PRtFl/pdisk_1.dat 2025-09-25T16:18:48.461012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:48.469579Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24394, node 1 2025-09-25T16:18:48.485021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.485037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.485040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.485096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23217 2025-09-25T16:18:48.522324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.522354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.523529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23217 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.547148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.551274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:48.558300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.579123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.599194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:48.613703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.632459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.824845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046538056187:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.824871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.824953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046538056197:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.824965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.883068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.890348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.902750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.917033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.931662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.946366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.959072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.972983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.993777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062046538057063:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.993808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.993857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046538057068:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.993862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046538057069:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.993866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.994652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... butable configuration TClient is connected to server localhost:1414 TClient is connected to server localhost:1414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:59.070512Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:59.075078Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:59.075111Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:59.076340Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:59.110099Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:59.136976Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:59.154000Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:59.163191Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:59.180134Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:59.469270Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062095814532267:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.469297Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.469479Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062095814532277:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.469486Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.482541Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.494923Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.506639Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.515813Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.529259Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.544215Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.563643Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.581136Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:59.601494Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062095814533138:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.601540Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.601610Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062095814533144:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.601623Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062095814533143:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.601628Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:59.602597Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:59.607155Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062095814533147:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:18:59.709501Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062095814533199:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:59.973106Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:00.008854Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) >> KqpConstraints::IndexAutoChooseAndNonReadyIndex [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] >> KqpScheme::CreateTableWithPgColumn >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] >> KqpScheme::AlterTransfer >> KqpScheme::UnknownFamilyTest [GOOD] >> KqpScheme::TwoSimilarFamiliesTest >> KqpScheme::TouchIndexAfterMoveTableRead [GOOD] >> KqpScheme::StreamingQueriesValidation >> KqpScheme::CreateFamilyWithCacheModeFeatureDisabled+UseQueryService >> KqpConstraints::Utf8AndDefault >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> KqpScheme::AlterDatabaseChangeSchemeLimits+EnableAlterDatabase [GOOD] >> KqpScheme::AlterDatabaseChangeSchemeLimits-EnableAlterDatabase >> KqpScheme::DropExternalTable [GOOD] >> KqpScheme::DropDependentExternalDataSource >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] >> KqpScheme::CreateTransfer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2025-09-25T16:18:07.434936Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:07.440194Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: "SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:07.440302Z node 4 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 4 PDiskId# 1 Path# "SectorMap:3:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:07.440566Z node 4 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new 
MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:07.440639Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:07.440841Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:157:2079] ControllerId# 72057594037932033 2025-09-25T16:18:07.440849Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:07.440879Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:07.440907Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:07.444151Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:07.444171Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:07.444540Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:164:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444575Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:165:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444604Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:166:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444633Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:167:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444662Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:168:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444692Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:169:2088] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444721Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [4:156:2078] Create Queue# [4:170:2089] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.444727Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:07.444742Z node 4 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [4:157:2079] 2025-09-25T16:18:07.444747Z node 4 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [4:157:2079] 2025-09-25T16:18:07.444756Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:07.444764Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:07.444973Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:07.444996Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:07.446153Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: 
"SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:07.446209Z node 5 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 5 PDiskId# 1 Path# "SectorMap:4:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:07.446328Z node 5 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:07.446379Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:07.446547Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:180:2080] ControllerId# 72057594037932033 2025-09-25T16:18:07.446553Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:07.446568Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:07.446603Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:07.447959Z node 5 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:07.449524Z node 5 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:07.449541Z node 5 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:07.449975Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:187:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450025Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:188:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450073Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:189:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450120Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:190:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450148Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:191:2088] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450180Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:192:2089] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450208Z node 5 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [5:179:2079] Create Queue# [5:193:2090] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.450213Z node 5 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:07.450228Z node 5 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [5:180:2080] 2025-09-25T16:18:07.450233Z node 5 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [5:180:2080] 2025-09-25T16:18:07.450242Z node 5 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:07.450252Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:07.450398Z 
node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:07.450425Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:07.451563Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: "SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:07.451607Z node 6 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 6 PDiskId# 1 Path# "SectorMap:5:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:07.451718Z node 6 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:07.451762Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:07.451944Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:203:2080] ControllerId# 72057594037932033 2025-09-25T16:18:07.451950Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:07.451965Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:07.452016Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:07.453433Z node 6 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:07.455077Z node 6 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:07.455092Z node 6 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:07.455615Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:210:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455654Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:211:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455684Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:212:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455712Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:213:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455741Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:214:2088] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455769Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [6:202:2079] Create Queue# [6:215:2089] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.455797Z node 6 :BS_PROXY DEBUG: group_sessions.cpp:83: 
Group# 0 Actor# [6:202:2079] C ... lient.cpp:193: TClient[72075186224037892] forward result remote node 69 [64:2117:2502] 2025-09-25T16:18:58.172033Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [64:2117:2502] 2025-09-25T16:18:58.172036Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037892]::SendEvent [64:2117:2502] 2025-09-25T16:18:58.172106Z node 69 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [64:2117:2502] 2025-09-25T16:18:58.172218Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [64:2117:2502] 2025-09-25T16:18:58.172222Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [64:2117:2502] 2025-09-25T16:18:58.172302Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [64:2121:2504] 2025-09-25T16:18:58.172307Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [64:2121:2504] 2025-09-25T16:18:58.172312Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [69:1323:2101] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:58.172316Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 64 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [69:1323:2101] 2025-09-25T16:18:58.172332Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 69 [64:2121:2504] 2025-09-25T16:18:58.172349Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [64:2121:2504] 2025-09-25T16:18:58.172352Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037893]::SendEvent [64:2121:2504] 2025-09-25T16:18:58.172378Z node 69 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [64:2121:2504] 2025-09-25T16:18:58.172436Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [64:2121:2504] 2025-09-25T16:18:58.172439Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [64:2121:2504] 2025-09-25T16:18:58.172521Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [64:2124:2506] 2025-09-25T16:18:58.172524Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [64:2124:2506] 2025-09-25T16:18:58.172529Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [68:1332:2141] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:58.172532Z node 64 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 64 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:18:58.172560Z node 64 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:58.172577Z node 64 :STATESTORAGE DEBUG: 
statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-09-25T16:18:58.172582Z node 64 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-09-25T16:18:58.172587Z node 64 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-09-25T16:18:58.172593Z node 64 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [69:1990:2269] CurrentLeaderTablet: [69:1995:2272] CurrentGeneration: 3 CurrentStep: 0} 2025-09-25T16:18:58.172607Z node 64 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [69:1990:2269] CurrentLeaderTablet: [69:1995:2272] CurrentGeneration: 3 CurrentStep: 0} 2025-09-25T16:18:58.172615Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037894 leader: [69:1990:2269] followers: 0 2025-09-25T16:18:58.172619Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 64 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [69:1990:2269] 2025-09-25T16:18:58.172639Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 69 [64:2124:2506] 2025-09-25T16:18:58.172664Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [64:2124:2506] 2025-09-25T16:18:58.172669Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037894]::SendEvent [64:2124:2506] 2025-09-25T16:18:58.172752Z node 69 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [64:2124:2506] 2025-09-25T16:18:58.172896Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [64:2124:2506] 2025-09-25T16:18:58.172904Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [64:2124:2506] 2025-09-25T16:18:58.173058Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [64:2128:2508] 2025-09-25T16:18:58.173064Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [64:2128:2508] 2025-09-25T16:18:58.173074Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [69:1834:2196] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:58.173081Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 64 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [69:1834:2196] 2025-09-25T16:18:58.173098Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote node 69 [64:2128:2508] 2025-09-25T16:18:58.173118Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [64:2128:2508] 2025-09-25T16:18:58.173124Z node 64 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037895]::SendEvent [64:2128:2508] 2025-09-25T16:18:58.173163Z node 69 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [64:2128:2508] 2025-09-25T16:18:58.173255Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [64:2128:2508] 2025-09-25T16:18:58.173262Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [64:2128:2508] 2025-09-25T16:18:58.173404Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [64:2131:2510] 2025-09-25T16:18:58.173409Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [64:2131:2510] 2025-09-25T16:18:58.173419Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [69:1837:2198] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:58.173425Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 64 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [69:1837:2198] 2025-09-25T16:18:58.173446Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 69 [64:2131:2510] 2025-09-25T16:18:58.173466Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [64:2131:2510] 2025-09-25T16:18:58.173471Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037896]::SendEvent [64:2131:2510] 2025-09-25T16:18:58.173521Z node 69 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [64:2131:2510] 2025-09-25T16:18:58.173607Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [64:2131:2510] 2025-09-25T16:18:58.173613Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [64:2131:2510] 2025-09-25T16:18:58.173751Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [64:2133:2511] 2025-09-25T16:18:58.173757Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [64:2133:2511] 2025-09-25T16:18:58.173780Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [64:624:2180] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:58.173787Z node 64 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 64 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [64:624:2180] 2025-09-25T16:18:58.173803Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [64:2133:2511] 2025-09-25T16:18:58.173821Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [64:2133:2511] 2025-09-25T16:18:58.173836Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [64:2133:2511] 2025-09-25T16:18:58.173842Z node 64 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [64:2133:2511] 2025-09-25T16:18:58.173856Z node 64 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [64:2133:2511] 2025-09-25T16:18:58.173902Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [64:2133:2511] 2025-09-25T16:18:58.173908Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [64:2133:2511] 2025-09-25T16:18:58.173912Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [64:2133:2511] 2025-09-25T16:18:58.173918Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [64:2133:2511] 2025-09-25T16:18:58.173922Z node 64 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [64:2133:2511] 2025-09-25T16:18:58.173931Z node 64 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [64:595:2175] EventType# 268697616 2025-09-25T16:18:58.173945Z node 64 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([64:2133:2511]) [64:2134:2512] 2025-09-25T16:18:58.173980Z node 64 :HIVE TRACE: hive_impl.cpp:1990: HIVE#72057594037927937 Handle TEvRequestHiveInfo >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> KqpAcl::AlterDatabasePrivilegesRequiredToChangeSchemeLimits-AsClusterAdmin [GOOD] >> KqpAcl::AclTemporary-IsOlap-UseAdmin >> KqpConstraints::AlterTableAddColumnWithDefaultRejection [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultCancellation >> KqpScheme::AlterTableAddImplicitSyncIndex >> KqpScheme::DisableResourcePoolsOnServerless [GOOD] >> KqpScheme::DisableResourcePoolClassifiers >> KqpAcl::AclCreateTableAs-IsOlap+UseAdmin [GOOD] >> KqpAcl::AclCreateTableAs+IsOlap+UseAdmin >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> TestYmqHttpProxy::TestCreateQueueWithTags >> KqpScheme::AlterTableVectorIndexInvalidSettingsPositions [GOOD] >> KqpScheme::AlterTableRenameVectorIndex >> TestKinesisHttpProxy::ListShards [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] >> KqpScheme::DisableStreamingQueries [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> KqpScheme::CreateTableWithDefaultSettings >> KqpScheme::AlterDatabaseChangeSchemeLimits-EnableAlterDatabase [GOOD] >> KqpScheme::AlterGroup >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields >> KqpScheme::CreateStreamingQueryErrors [GOOD] >> KqpScheme::CreateSecret+UseQueryService >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestYmqHttpProxy::TestListQueues >> TestYmqHttpProxy::BillingRecordsForJsonApi >> KqpScheme::CreateTableWithPgColumn [GOOD] >> TestYmqHttpProxy::TestDeleteQueue >> 
KqpScheme::TwoSimilarFamiliesTest [GOOD] >> KqpAcl::AclTemporary-IsOlap-UseAdmin [GOOD] >> KqpAcl::AclTemporary+IsOlap-UseAdmin >> KqpConstraints::Utf8AndDefault [GOOD] >> KqpOlapScheme::AddColumnLongPk |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> KqpScheme::CreateFamilyWithCacheModeFeatureDisabled+UseQueryService [GOOD] >> KqpScheme::CreateFamilyWithCacheModeFeatureDisabled-UseQueryService >> KqpScheme::AlterCompressionLevelInColumnFamily ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableStreamingQueries [GOOD] Test command err: Trying to start YDB, gRPC: 27009, MsgBus: 23179 2025-09-25T16:18:48.499392Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062044866074599:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.499412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e7f/r3tmp/tmpZoSqVn/pdisk_1.dat 2025-09-25T16:18:48.543187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:48.546199Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27009, node 1 2025-09-25T16:18:48.558307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.558320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.558322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.558367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23179 2025-09-25T16:18:48.601676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.601708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.602832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:48.629439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.658502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.682889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.706525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.730304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:48.786364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.911972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044866076116:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.912004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.912094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044866076126:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.912106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.979824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.987633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.001042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.015381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.029268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.042583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.059856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.071006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.089048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062049161044286:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:49.089064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062049161044291:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:49.089075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:49.089128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062049161044294:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:49.089135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:49.089885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... r/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:01.421013Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:01.661097Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062104419340049:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.661124Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.661259Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062104419340059:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.661265Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.669222Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.686042Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.703440Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.718479Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.736430Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.758380Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.783016Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.802364Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.822457Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7554062104419340920:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.822491Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.822597Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062104419340926:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.822600Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062104419340925:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.822606Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.823364Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:01.831168Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062104419340929:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:19:01.930000Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062104419340981:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:01.962890Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:02.289534Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Check query: CREATE STREAMING QUERY MyQuery WITH ( RUN = FALSE ) AS DO BEGIN INSERT INTO MySource.MyTopic SELECT * FROM MySource.MyTopic END DO 2025-09-25T16:19:02.322104Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:02.326047Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:02.330572Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=MmMyYzcxY2UtYmFiMGZiOTMtNzBlY2UzYTMtODVkYmM3YjU=, ActorId: [7:7554062108714308731:2545], ActorState: ExecuteState, TraceId: 01k60tqhgs6rv057n2jas1r21q, Create QueryResponse for error on request, msg: Check query: ALTER STREAMING QUERY MyQuery SET (RUN = FALSE); 2025-09-25T16:19:02.342430Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=MjFjNDEzMTctYTlmYTA5ZWYtNzczZmNiNjEtZjY0ZDc1MDI=, ActorId: [7:7554062108714308773:2556], ActorState: ExecuteState, TraceId: 01k60tqhhwbnxz9ygqm6m1q506, Create QueryResponse for error on request, msg: Check query: DROP STREAMING QUERY MyQuery; 2025-09-25T16:19:02.366211Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.499002Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:02.603243Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.701111Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:02.798765Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [7:7554062108714309171:4033] QueryPath: /Root/MyQuery. Failed NOT_FOUND, with issues: {
: Error: Streaming query /Root/MyQuery not found or you don't have access permissions } 2025-09-25T16:19:02.798945Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=NWYxM2E4NjktOWQ0OTI4NDEtZTBiYmE0OTAtMmQ3N2E2OTM=, ActorId: [7:7554062108714308785:2559], ActorState: ExecuteState, TraceId: 01k60tqhj97jzqa3jetn97rw0c, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] Test command err: Trying to start YDB, gRPC: 6559, MsgBus: 22664 2025-09-25T16:18:47.426589Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062043896258404:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.426609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f6c/r3tmp/tmpevH949/pdisk_1.dat 2025-09-25T16:18:47.478269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.481047Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6559, node 1 2025-09-25T16:18:47.497851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.497869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.497871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.497918Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22664 2025-09-25T16:18:47.527992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.528025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.529099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.567877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:47.578637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.595726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.616459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.626710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.667478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.797351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043896259915:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.797377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.797437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043896259925:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.797446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.863401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.871566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.880287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.894483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.908193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.922766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.937121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.951226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.964483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062043896260787:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.964514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.964534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043896260792:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.964546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043896260794:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.964556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.965474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 7205759404664 ... urce pool default not found or you don't have access permissions } 2025-09-25T16:19:00.419644Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.438395Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.452135Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.464134Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.478474Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.496262Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.512214Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.558002Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062097792070300:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.558038Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.558300Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062097792070305:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.558311Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062097792070306:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.558334Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.559329Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:00.571715Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:19:00.571939Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062097792070309:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:00.669887Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062097792070361:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:00.897298Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:01.072592Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.230609Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:01.330491Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.507376Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.665471Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:01.776760Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:02.256382Z node 7 :KQP_GATEWAY WARN: query_actor.cpp:375: [TQueryBase] [TRanksCheckerActor] OwnerId: [7:7554062106382006022:4088], ActorId: [7:7554062106382006024:4090], TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=7&id=NmRiZjEwMTItN2UwNWUzMi1jZWJlNGU5Ny04NjZhNjhmMQ==, TxId: 01k60tqhfa9bjxh9ydk2a5qmnd 2025-09-25T16:19:02.441481Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062106382006206:2803], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.441526Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.441662Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062106382006218:2804], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.441668Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.585076Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062106382006274:2824], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.585117Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.585250Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062106382006278:2826], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.585266Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2025-09-25T16:19:02.675528Z node 7 :KQP_GATEWAY WARN: query_actor.cpp:375: [TQueryBase] [TRanksCheckerActor] OwnerId: [7:7554062106382006300:4182], ActorId: [7:7554062106382006302:4184], TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=7&id=NjVhMTEyMGQtYTc3YWFkODEtZDdiZDEwMWMtYTIwNzEzOTA=, TxId: 01k60tqhw06023yqk6eedszzva >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> KqpScheme::DropDependentExternalDataSource [GOOD] >> KqpScheme::DropAsyncReplication >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> KqpScheme::CreateTransfer [GOOD] >> KqpScheme::CreateTransfer_QueryService >> TestYmqHttpProxy::TestTagQueue >> KqpOlapScheme::AlterCompressionLevel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TwoSimilarFamiliesTest [GOOD] Test command err: Trying to start YDB, gRPC: 12562, MsgBus: 64777 2025-09-25T16:18:51.824320Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062059242645740:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:51.827127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:51.828037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e58/r3tmp/tmpc96pre/pdisk_1.dat 2025-09-25T16:18:51.851024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:51.851059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:51.852135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:51.864000Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12562, node 1 2025-09-25T16:18:51.877920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:51.877931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:51.877934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:51.877989Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64777 TClient is connected to server localhost:64777 2025-09-25T16:18:51.916855Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:51.928947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:51.949051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.968113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.989302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.000746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.226294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062063537614569:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.226320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.226459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062063537614579:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.226466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.293370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.306555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.318933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.335019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.348263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.360948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.375847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.391879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.412690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062063537615441:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.412721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.412853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062063537615446:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.412865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062063537615447:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.412903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.413836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB c ... VE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29961, node 7 2025-09-25T16:19:02.248500Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:02.248517Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:02.248520Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:02.248573Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25600 TClient is connected to server localhost:25600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:19:02.348043Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:02.365797Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:02.390104Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:02.413168Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:02.437301Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:19:02.474128Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.715364Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062107979124327:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.715429Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.719870Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.719915Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062107979124410:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.719935Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.731577Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.743403Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.756865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.770905Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.791856Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.806182Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.830755Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.874307Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062107979125199:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.874348Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.874473Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062107979125204:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.874481Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062107979125205:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.874503Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.875573Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:02.881673Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:19:02.881761Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062107979125208:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:02.969771Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062107979125260:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:03.230139Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithPgColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14856, MsgBus: 24511 2025-09-25T16:18:47.369259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062043143027912:2075];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.369281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f52/r3tmp/tmpoEG4f1/pdisk_1.dat 2025-09-25T16:18:47.408461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.416600Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14856, node 1 2025-09-25T16:18:47.431545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.431560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.431562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.431612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24511 2025-09-25T16:18:47.472453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.472481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.473942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.506747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:47.521970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.550470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.571870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.582302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:47.628330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.837756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043143029510:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.837785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.837859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043143029520:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.837875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.884484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.892514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.901306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.908207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.922470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.936376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.950957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.959763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:47.974763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062043143030381:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.974807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.974853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043143030387:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.974858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043143030388:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.974865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.975764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... rmissions } 2025-09-25T16:19:02.445348Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.445457Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554062108095088330:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.445463Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.500968Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.517593Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.531137Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.550883Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.566752Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.581181Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.605761Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.632171Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.658248Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[11:7554062108095089192:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.658282Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.658411Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554062108095089197:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.658419Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7554062108095089198:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.658440Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.659450Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:02.665356Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:19:02.665440Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7554062108095089201:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:02.744739Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7554062108095089253:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:02.945033Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:03.083248Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.131202Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.178874Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.219327Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.262908Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.322559Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.377833Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.417781Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.465709Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.505733Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.530272Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] >> KqpOlapScheme::AddColumnLongPk [GOOD] >> KqpOlapScheme::AddColumn >> KqpScheme::CreateTableWithDefaultSettings [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat |81.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpSchemeFulltext::AlterTableWithIndexInvalidSettings [GOOD] Test command err: Trying to start YDB, gRPC: 17861, MsgBus: 26039 2025-09-25T16:18:48.289940Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062046524463166:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.290208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e9e/r3tmp/tmphPB3uB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17861, node 1 2025-09-25T16:18:48.340365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:48.340408Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:48.351596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.351613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.351616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.351659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26039 2025-09-25T16:18:48.392519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.392555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.393613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:48.409758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.422810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:48.447088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.480346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.493552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.637403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.744106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046524464744:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.744141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.744282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046524464754:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.744306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.799649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.808941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.818976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.835084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.848780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.865445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.879535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.890821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.909467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062046524465616:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.909504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.909521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046524465621:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.909539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062046524465623:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.909550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.910477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... p:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:59.948783Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:59.948785Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:59.948847Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32609 TClient is connected to server localhost:32609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:19:00.077479Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:00.120470Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:00.226006Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:00.306560Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:19:00.346314Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.368395Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:00.481740Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062100179507951:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.481799Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.482402Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062100179507969:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.482421Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.488794Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.503200Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.531197Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.545157Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.568153Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.601955Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.619909Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.636731Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:00.673824Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7554062100179508823:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.673860Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.674054Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062100179508828:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.674073Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.674072Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062100179508829:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:00.675054Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:00.679401Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062100179508832:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:00.753583Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062100179508884:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:00.878432Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:01.224742Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] >> KqpScheme::AlterTableRenameVectorIndex [GOOD] >> KqpScheme::AlterTableWithDecimalColumn >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> KqpScheme::CreateSecret+UseQueryService [GOOD] >> KqpScheme::CreateSecret-UseQueryService >> KqpScheme::AlterTableAddImplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncIndex >> KqpScheme::CreateAsyncReplicationWithTokenSecret [GOOD] >> KqpScheme::CreateAsyncReplicationWithPasswordSecret >> KqpScheme::AlterGroup [GOOD] >> KqpScheme::AlterNonExistingResourcePool >> KqpAcl::AclCreateTableAs+IsOlap+UseAdmin [GOOD] >> KqpScheme::StreamingQueriesValidation [GOOD] >> KqpScheme::StreamingQueriesWithResourcePools ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] Test command err: Trying to start YDB, gRPC: 20852, MsgBus: 10752 2025-09-25T16:18:52.633331Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062065016559882:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.633569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001dc9/r3tmp/tmpTQf8UA/pdisk_1.dat 2025-09-25T16:18:52.691400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:52.691805Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20852, node 1 2025-09-25T16:18:52.710716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.710725Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.710727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.710754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10752 2025-09-25T16:18:52.734331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.734356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.735402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10752 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.777544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.780469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:52.906077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:53.076366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062069311527809:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.076403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062069311527801:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.076429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.076817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062069311527816:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.076842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.077611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:53.080248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:53.080393Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062069311527815:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:53.166009Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062069311527868:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:53.209446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.289070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715663:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:343) 2025-09-25T16:18:53.298657Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-09-25T16:18:53.304780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) Trying to start YDB, gRPC: 64611, MsgBus: 8600 2025-09-25T16:18:53.675357Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062066803283058:2240];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:53.675402Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001dc9/r3tmp/tmpQp89m3/pdisk_1.dat 2025-09-25T16:18:53.679334Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:53.693751Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64611, node 2 2025-09-25T16:18:53.704372Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:53.704387Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:53.704390Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:53.704431Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8600 
TClient is connected to server localhost:8600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:53.779375Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:53.779413Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:53.779881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:53.781160Z node 2 :HIVE WARN: node_info.cpp:25: HIVE# ... 09-25T16:19:02.508333Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062106346512133:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.508338Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.509195Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:02.511569Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-09-25T16:19:02.511655Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062106346512136:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-09-25T16:19:02.566554Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062106346512187:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:02.573728Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.008858Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.031559Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:03.049800Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.133409Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715762:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.177829Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715765:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.247262Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715767:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.268774Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715770:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.335538Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715772:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.369969Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.445525Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715777:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.501806Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715780:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.600197Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715782:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.633542Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715785:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.705834Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715787:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.762029Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715790:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 
2025-09-25T16:19:03.822803Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715792:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.889573Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715795:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:04.000322Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715797:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:04.024563Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715800:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:04.141471Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715802:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:04.195263Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715805:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:04.322257Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715807:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:04.370000Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715810:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) >> TTablesWithReboots::Fake [GOOD] >> KqpAcl::AclTemporary+IsOlap-UseAdmin [GOOD] >> KqpAcl::AclTemporary-IsOlap+UseAdmin >> 
TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> KqpOlapScheme::AlterCompressionLevel [GOOD] >> KqpOlapScheme::AddColumnWithoutColumnFamily >> KqpScheme::AlterCompressionLevelInColumnFamily [GOOD] >> KqpScheme::AlterCacheModeInColumnFamilyFeatureDisabled+UseQueryService >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> KqpScheme::CreateFamilyWithCacheModeFeatureDisabled-UseQueryService [GOOD] >> KqpScheme::CreateExternalDataSourceWithSa >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> KqpScheme::DisableResourcePoolClassifiers [GOOD] >> KqpScheme::DisableResourcePoolClassifiersOnServerless >> KqpScheme::CreateTransfer_QueryService [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::Fake [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> KqpScheme::CreateAlterUserWithHash >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultCancellation [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultOlap >> KqpScheme::CreateSecret-UseQueryService [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::AclCreateTableAs+IsOlap+UseAdmin [GOOD] Test command err: Trying to start YDB, gRPC: 25701, MsgBus: 4470 2025-09-25T16:18:47.981428Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062043869576298:2148];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.981489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f1a/r3tmp/tmp4aPQUP/pdisk_1.dat 2025-09-25T16:18:48.013377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:48.025066Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25701, node 1 2025-09-25T16:18:48.035939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.035951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.035952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.036006Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4470 TClient is connected to server localhost:4470 2025-09-25T16:18:48.088594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.088635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-09-25T16:18:48.089738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:48.101863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:48.105604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:48.132547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.148089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.169218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.170797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:48.182364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.376514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048164545122:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.376542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.376603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048164545132:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.376610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.424746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.434863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.449265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.461943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.476587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.491162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.504040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.517565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.534690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062048164545994:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.534714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.534884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048164546000:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.534887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048164545999:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.534921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.535844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshar ... 186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773737Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773743Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773750Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773756Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773761Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773767Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773773Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773780Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773786Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773793Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773799Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773806Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773812Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773818Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773824Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773830Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773837Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773843Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773850Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773856Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773862Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773869Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773874Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773881Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773886Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773893Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773900Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773906Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773925Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773932Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773938Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773944Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773951Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773957Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773964Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.773971Z node 8 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715667 2025-09-25T16:19:04.880535Z node 8 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [8:7554062115657566472:5409], for# root@builtin, access# DescribeSchema 2025-09-25T16:19:04.880556Z node 8 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [8:7554062115657566472:5409], for# root@builtin, access# DescribeSchema 2025-09-25T16:19:04.880856Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062115657566469:3214], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:19:04.881437Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=MzNiYmJjOTAtOGNkNWVhOTMtMmQ5NTQ1NWUtZDI3NWVlY2E=, ActorId: [8:7554062111362593521:2330], ActorState: ExecuteState, TraceId: 01k60tqm1e25wdy2gn8azx66aw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-09-25T16:19:04.898050Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:05.025397Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-09-25T16:19:05.027262Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715671, at schemeshard: 72057594046644480 2025-09-25T16:19:05.027581Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) >> TestYmqHttpProxy::TestDeleteMessage >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSigned |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpOlapScheme::AddColumn [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert >> KqpOlapScheme::AddColumnWithoutColumnFamily [GOOD] >> KqpOlapScheme::AddColumnWithColumnFamily >> KqpScheme::DropAsyncReplication [GOOD] >> KqpScheme::DropAsyncReplicationCascade >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTransfer_QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28107, MsgBus: 8307 2025-09-25T16:18:52.543249Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062061895367270:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.543412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d9c/r3tmp/tmp9j3WWX/pdisk_1.dat 2025-09-25T16:18:52.591585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-09-25T16:18:52.595779Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28107, node 1 2025-09-25T16:18:52.612783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.612796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.612798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.612856Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8307 2025-09-25T16:18:52.644497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.644532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.645596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.720076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.729246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:52.737683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:52.781096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.822795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:52.839459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.861168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:52.989407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062061895368858:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.989434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.989552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062061895368868:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.989563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.053267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.069403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.082234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.118617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.136183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.151717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.171766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.187948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.214097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062066190337025:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.214125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.214184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062066190337030:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.214192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062066190337031:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.214209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.215130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshar ... _create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:05.679890Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640181:2525], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: CONNECTION_STRING and ENDPOINT/DATABASE are mutually exclusive 2025-09-25T16:19:05.680159Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmt974pp02qe909sghds, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.690805Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640185:2527], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: Neither CONNECTION_STRING nor ENDPOINT/DATABASE are provided 2025-09-25T16:19:05.692296Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmtp0m6ansq9v9fd4xz6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.697070Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640189:2529], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: Neither CONNECTION_STRING nor ENDPOINT/DATABASE are provided 2025-09-25T16:19:05.697796Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmtxczeh7v3p7sj9df50, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.705395Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:05.708459Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640193:2531], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: TOKEN, USER/PASSWORD and SERVICE_ACCOUNT_ID/INITIAL_TOKEN are mutually exclusive 2025-09-25T16:19:05.708934Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmv539q4jharxkjeqzry, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.718271Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640206:2534], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: TOKEN and TOKEN_SECRET_NAME are mutually exclusive 2025-09-25T16:19:05.719308Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmvhb71zxfzfbb79nptf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.726474Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640210:2536], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: PASSWORD and PASSWORD_SECRET_NAME are mutually exclusive 2025-09-25T16:19:05.727082Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmvr20m5cqj8aa2wnkr2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.733417Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640214:2538], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: USER is not provided 2025-09-25T16:19:05.733534Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmw009zmzj57xrv0cpp3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.738015Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640218:2540], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: Neither PASSWORD nor PASSWORD_SECRET_NAME are provided 2025-09-25T16:19:05.738602Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmw629et766sdsx7q0e2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.746524Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640222:2542], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: batch_size_bytes must be greater than 0 but 0 2025-09-25T16:19:05.746719Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmwd127ba8tsm04tnjeq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.752914Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640226:2544], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: flush_interval must be Interval 2025-09-25T16:19:05.753018Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmwm0c9drp01ekh32ew3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.759586Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [8:7554062117497640230:2546], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:4:87: Error: consumer must be not empty 2025-09-25T16:19:05.760383Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmwt7k778eq4107m36kp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.768307Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062117497640238:3772] txid# 281474976710673, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-09-25T16:19:05.768457Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=8&id=OGUyZGZhMmYtNWYyY2RlYjQtMWRiN2U2NmYtYjE4ODk1MDA=, ActorId: [8:7554062117497640172:2519], ActorState: ExecuteState, TraceId: 01k60tqmx1543bj41wkf1bkmhy, Create QueryResponse for error on request, msg: 2025-09-25T16:19:05.776280Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.810380Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:19:05.834849Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:19:05.837592Z node 8 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:05.845853Z node 8 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:05.846895Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:313) 2025-09-25T16:19:05.849330Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:19:05.852333Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062117497640666:4015] txid# 281474976710681, issues: { message: "Check failed: path: \'/Root/topic\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypePersQueueGroup, state: 
EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:574" severity: 1 } 2025-09-25T16:19:05.857344Z node 8 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:05.861202Z node 8 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:05.869771Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:313) 2025-09-25T16:19:05.876232Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:19:05.884534Z node 8 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/topic, status# SCHEME_ERROR, issues# , iteration# 0 2025-09-25T16:19:05.884600Z node 8 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037933][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/topic: SCHEME_ERROR () 2025-09-25T16:19:05.886959Z node 8 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request >> KqpScheme::AlterTableWithDecimalColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateSecret-UseQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14471, MsgBus: 3033 2025-09-25T16:18:48.320771Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062044764299226:2065];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.320793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f0a/r3tmp/tmpWfutH5/pdisk_1.dat 2025-09-25T16:18:48.371275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:48.371654Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14471, node 1 2025-09-25T16:18:48.375971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062044764299198:2081] 1758817128320486 != 1758817128320489 2025-09-25T16:18:48.387424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.387439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.387441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
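For reference, the CreateTransfer_QueryService errors above are WITH-clause validation failures for CREATE TRANSFER: CONNECTION_STRING vs. ENDPOINT/DATABASE, TOKEN vs. USER/PASSWORD vs. SERVICE_ACCOUNT_ID/INITIAL_TOKEN, TOKEN vs. TOKEN_SECRET_NAME, PASSWORD vs. PASSWORD_SECRET_NAME, a missing USER or PASSWORD, batch_size_bytes <= 0, a non-Interval flush_interval, and an empty consumer. A minimal YQL sketch of a statement that would pass those checks is below; only the option names come from the log messages (casing assumed), while the transfer/topic/table names, the lambda body, and the concrete values are illustrative assumptions:

    CREATE TRANSFER example_transfer                                  -- hypothetical name
        FROM source_topic TO target_table                             -- hypothetical topic and table
        USING ($msg) -> {
            RETURN [ <|offset: $msg._offset, data: $msg._data|> ];    -- assumed message-to-row mapping
        }
        WITH (
            CONNECTION_STRING = "grpcs://host:2135/?database=/Root",  -- use either this or ENDPOINT/DATABASE, not both
            TOKEN_SECRET_NAME = "transfer_token",                     -- pick one auth form: TOKEN, TOKEN_SECRET_NAME, USER/PASSWORD(_SECRET_NAME), or SERVICE_ACCOUNT_ID/INITIAL_TOKEN
            CONSUMER = "transfer_consumer",                           -- must be non-empty
            BATCH_SIZE_BYTES = 1048576,                               -- must be greater than 0
            FLUSH_INTERVAL = Interval("PT1S")                         -- must be an Interval value
        );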
2025-09-25T16:18:48.387491Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3033 TClient is connected to server localhost:3033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:48.429398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.429432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.430504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.438067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:48.440600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:48.461682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.484707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.504602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:48.514712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:48.651285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.722286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044764300847:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.722309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.722373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044764300857:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.722392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.785037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.795577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.804786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.818539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.833067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.848197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.862253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.876764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.901264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062044764301718:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.901299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.901390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044764301723:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.901402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062044764301724:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.901408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... 5-09-25T16:19:05.239360Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=9&id=ODcyMzIwYWUtZDdhNWZhZjMtNTNjOWNmZTMtODc4ZDBiMDE=, ActorId: [9:7554062117618075505:2317], ActorState: ExecuteState, TraceId: 01k60tqm992kh33y6wtyj14h5b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.250954Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [9:7554062117618075617:2333], status: GENERIC_ERROR, issues:
:2:74: Error: Unknown parameter: SECRET_VALUE 2025-09-25T16:19:05.251577Z node 9 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=9&id=ZGI0YTZkNGUtN2MyYzhiNjEtOTAyMzJjMGUtZTNhZmFhMmI=, ActorId: [9:7554062117618075610:2329], ActorState: ExecuteState, TraceId: 01k60tqmct0r2kambr3kv1eykb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-09-25T16:19:05.259135Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:19:05.266693Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-09-25T16:19:05.279776Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:19:05.284247Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22326, MsgBus: 17947 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f0a/r3tmp/tmpnZ7tat/pdisk_1.dat 2025-09-25T16:19:05.684920Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:19:05.686181Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:05.689961Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22326, node 10 2025-09-25T16:19:05.713744Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:05.713761Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:05.713763Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:05.713820Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17947 2025-09-25T16:19:05.761370Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:05.761408Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:05.769183Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17947 2025-09-25T16:19:05.869625Z node 10 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:05.881987Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:05.883996Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:19:06.225760Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386739:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.225790Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.225997Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386749:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.226006Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.236470Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386753:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.236495Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.236687Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386756:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.236694Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.240568Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:19:06.241033Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386760:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.241062Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.241120Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386773:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.241124Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.247068Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:263) 2025-09-25T16:19:06.248100Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386785:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.248219Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.249501Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062123301386805:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.249592Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.249816Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpAcl::AclTemporary-IsOlap+UseAdmin [GOOD] >> KqpAcl::AclTemporaryInterruptInheritance+IsOlap >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> KqpScheme::CreateAsyncReplicationWithPasswordSecret [GOOD] >> KqpScheme::CreateAsyncReplicationWithIamAuth+UseQueryService >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> KqpScheme::AlterNonExistingResourcePool [GOOD] >> KqpScheme::AlterNonExistingResourcePoolClassifier >> TestKinesisHttpProxy::TestWrongStream >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TestYmqHttpProxy::TestListQueues [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> KqpScheme::AlterTableAddExplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableWithDecimalColumn [GOOD] Test command err: Trying to start YDB, gRPC: 3438, MsgBus: 14352 2025-09-25T16:18:52.286656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062065650731606:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.286672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:52.293359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e54/r3tmp/tmpFnorjL/pdisk_1.dat 2025-09-25T16:18:52.331175Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3438, node 1 2025-09-25T16:18:52.356937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.356948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.356950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.356995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14352 2025-09-25T16:18:52.393089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.393115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.397146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:52.409346Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.457000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.460399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:52.497444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.539018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:52.563664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.578718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
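A side note on the recurring "[WorkloadService] [TPoolFetcherActor] ... Failed to fetch pool info, NOT_FOUND ... Resource pool default not found" warnings in these logs (including the block that follows): per the log text, the workload service cannot fetch the pool named "default" because it does not yet exist or is not accessible, and the affected tests still finish with [GOOD]. For context only, a resource pool is declared in YQL roughly as follows; the pool name and option values are assumptions and do not appear anywhere in the log:

    CREATE RESOURCE POOL default_pool WITH (   -- hypothetical name; the warnings refer to a pool literally called "default"
        CONCURRENT_QUERY_LIMIT = 10,           -- assumed limits, for illustration only
        QUEUE_SIZE = 100
    );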
2025-09-25T16:18:52.737284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065650733206:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.737309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.737458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065650733216:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.737492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.813273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.823939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.838610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.850966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.865474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.881634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.904364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.919207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:52.947338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062065650734079:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.947378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.947460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065650734084:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.947476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065650734085:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.947485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.948516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183 ... boperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.571096Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.602234Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.620225Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.731727Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:05.946938Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062119625914689:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.946996Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.948539Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062119625914707:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.948612Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.952651Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.966070Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.978288Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.991809Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.004366Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.019254Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.037844Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.054864Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.093347Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062123920882861:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.093376Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.093603Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062123920882867:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.093694Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062123920882866:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.093702Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:06.094599Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:06.100273Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-09-25T16:19:06.100344Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062123920882870:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-09-25T16:19:06.166054Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062123920882922:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:06.435408Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:06.698354Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:06.731521Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:06.745553Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:06.758586Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:06.782445Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:06.801782Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) >> ForceDropWithReboots::ForceDelete >> KqpScheme::AlterCacheModeInColumnFamilyFeatureDisabled+UseQueryService [GOOD] >> KqpScheme::AlterCacheModeInColumnFamilyFeatureDisabled-UseQueryService >> KqpScheme::CreateAlterUserWithHash [GOOD] >> KqpScheme::CreateAlterUserLoginNoLogin >> TestYmqHttpProxy::TestTagQueue [GOOD] >> KqpOlapScheme::AddColumnWithColumnFamily 
[GOOD] >> KqpOlapScheme::AddExsitsColumnFamily >> KqpConstraints::AlterTableAddColumnWithDefaultOlap [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TestYmqHttpProxy::TestPurgeQueue >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::AddColumnErrors >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> KqpScheme::CreateTableWithPartitionAtKeysSigned [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex >> TestYmqHttpProxy::TestUntagQueue >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddColumnWithDefaultOlap [GOOD] Test command err: Trying to start YDB, gRPC: 29669, MsgBus: 18116 2025-09-25T16:18:52.512183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062065812445568:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.512384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d48/r3tmp/tmp0wJt4R/pdisk_1.dat 2025-09-25T16:18:52.574939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:52.578079Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29669, node 1 2025-09-25T16:18:52.592150Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.592161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.592162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.592203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18116 2025-09-25T16:18:52.615251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.615282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.615944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.654047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.848060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:52.982880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065812446194:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.982905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065812446186:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.982964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.985694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062065812446201:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.985714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:52.986353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:52.989758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062065812446200:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:18:53.089817Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062070107413549:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:53.175132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.230675Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [1:7554062070107413664:2337], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At function: KiAlterTable!
:3:34: Error: Column addition with serial data type is unsupported 2025-09-25T16:18:53.231269Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=1&id=OGM4MWZhYzUtZDA3ZDE0NDAtYjIwMmQ4NmYtM2Q4MjY4Yg==, ActorId: [1:7554062065812446182:2314], ActorState: ExecuteState, TraceId: 01k60tq8n8ftmqyfk7g3w9mmz7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 2479, MsgBus: 24951 2025-09-25T16:18:53.490528Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062070019256999:2093];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:53.492372Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d48/r3tmp/tmpmsNHBs/pdisk_1.dat 2025-09-25T16:18:53.512248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:53.512277Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:53.512905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:53.513618Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:53.516489Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2479, node 2 2025-09-25T16:18:53.524987Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:53.524998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:53.525000Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:53.525050Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24951 TClient is connected to server localhost:24951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:53.606762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:53.757338Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:53.993964Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062070019257588:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:1 ... local=6;result=not_found; 2025-09-25T16:19:07.755201Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[8:7554062126195333396:2389];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976710660;this=19201540083904;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=172:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755280Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[8:7554062126195333203:2334];ev=NActors::IEventHandle;tablet_id=72075186224037919;tx_id=281474976710660;this=19201540040080;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=322:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755314Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;self_id=[8:7554062126195333250:2351];ev=NActors::IEventHandle;tablet_id=72075186224037916;tx_id=281474976710660;this=19201540205872;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=292:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755370Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;self_id=[8:7554062126195333278:2369];ev=NActors::IEventHandle;tablet_id=72075186224037923;tx_id=281474976710660;this=19201540043072;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=362:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755413Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[8:7554062126195333253:2352];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710660;this=19201554136368;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=422:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755466Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[8:7554062126195333202:2333];ev=NActors::IEventHandle;tablet_id=72075186224037946;tx_id=281474976710660;this=19201619391488;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=592:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755498Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037949;self_id=[8:7554062126195333225:2346];ev=NActors::IEventHandle;tablet_id=72075186224037949;tx_id=281474976710660;this=19201540115760;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=622:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755592Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[8:7554062126195333254:2353];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710660;this=19201540087248;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=432:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755619Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[8:7554062126195333244:2349];ev=NActors::IEventHandle;tablet_id=72075186224037942;tx_id=281474976710660;this=19201475067616;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=552:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755703Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;self_id=[8:7554062126195333272:2363];ev=NActors::IEventHandle;tablet_id=72075186224037912;tx_id=281474976710660;this=19201484115328;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=252:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755704Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[8:7554062126195333243:2348];ev=NActors::IEventHandle;tablet_id=72075186224037943;tx_id=281474976710660;this=19201619250160;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=562:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755792Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;self_id=[8:7554062126195333261:2357];ev=NActors::IEventHandle;tablet_id=72075186224037922;tx_id=281474976710660;this=19201540060144;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=352:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755854Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;self_id=[8:7554062126195333222:2344];ev=NActors::IEventHandle;tablet_id=72075186224037941;tx_id=281474976710660;this=19201475104400;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=542:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755873Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037936;self_id=[8:7554062126195333208:2335];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976710660;this=19201484000048;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=492:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755958Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[8:7554062126195333258:2354];ev=NActors::IEventHandle;tablet_id=72075186224037947;tx_id=281474976710660;this=19201484076256;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=602:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.755963Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037921;self_id=[8:7554062126195333275:2366];ev=NActors::IEventHandle;tablet_id=72075186224037921;tx_id=281474976710660;this=19201540203232;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147755;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=342:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756053Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037920;self_id=[8:7554062126195333270:2362];ev=NActors::IEventHandle;tablet_id=72075186224037920;tx_id=281474976710660;this=19201554136896;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=332:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756056Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[8:7554062126195333212:2338];ev=NActors::IEventHandle;tablet_id=72075186224037939;tx_id=281474976710660;this=19201475105104;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=522:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756135Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[8:7554062126195333276:2367];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976710660;this=19201540150432;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=222:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756138Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[8:7554062126195333137:2329];ev=NActors::IEventHandle;tablet_id=72075186224037895;tx_id=281474976710660;this=19201554131616;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=82:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756216Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[8:7554062126195333370:2386];ev=NActors::IEventHandle;tablet_id=72075186224037893;tx_id=281474976710660;this=19201540199008;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=62:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756239Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;self_id=[8:7554062126195333320:2373];ev=NActors::IEventHandle;tablet_id=72075186224037917;tx_id=281474976710660;this=19201554124400;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=302:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756301Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[8:7554062126195333303:2370];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976710660;this=19201540165040;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=232:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-09-25T16:19:07.756315Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[8:7554062126195333221:2343];ev=NActors::IEventHandle;tablet_id=72075186224037950;tx_id=281474976710660;this=19201540162576;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1758817147756;max=18446744073709551615;plan=0;src=[8:7554062121900364926:2140];cookie=632:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; >> KqpScheme::StreamingQueriesWithResourcePools [GOOD] >> KqpAcl::AclTemporaryInterruptInheritance+IsOlap [GOOD] >> KqpAcl::AclTemporaryInterruptInheritance-IsOlap >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> KqpScheme::CreateAsyncReplicationWithIamAuth+UseQueryService [GOOD] >> KqpScheme::CreateAsyncReplicationWithIamAuth-UseQueryService >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TestYmqHttpProxy::TestDeleteMessage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] 
Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.237838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.237870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.237877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.237883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.237891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.237897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.237908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.237931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.238120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.238221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.256219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.256257Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.262510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.262654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.262719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.264929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.265015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.265147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.265241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.265841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-09-25T16:18:49.265894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.266217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.266229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.266256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.266265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.266271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.266313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.268006Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.293818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.293936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.294008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.294017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.294065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.294082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.295019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.295074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.295140Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.295154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.295161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.295167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.295745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.295763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.295770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.296198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.296213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.296220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.296229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.297062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.297579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.297625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.297900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.297935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.297943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.298016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.298024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.298066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.298094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.298619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.298631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 25T16:19:08.242829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-09-25T16:19:08.242867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 129 2025-09-25T16:19:08.242906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:08.245923Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:786: [Export] [s3] Bootstrap: self# [1:3462:5424], attempt# 0 2025-09-25T16:19:08.251536Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:442: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3462:5424], sender# [1:3461:5423] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:23503 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 769542D1-737F-4A2D-9037-32E26950DFAC amz-sdk-request: attempt=1 content-length: 94 content-md5: ZpDejBbuBPHjGq8ZC8z8QA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 94 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:19:08.264122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:08.264144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:08.264234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:08.264241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:19:08.264458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.264471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:08.264729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:08.264746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:08.264751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:08.264758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:08.264770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:08.264793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:08.269046Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:402: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3462:5424], result# PutObjectResult { ETag: 6690de8c16ee04f1e31aaf190bccfc40 } 2025-09-25T16:19:08.271682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:23503 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1642503E-A28C-4C7B-9073-B46AC49AF0DD amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-09-25T16:19:08.273752Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:307: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3462:5424], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-09-25T16:19:08.273823Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3461:5423] 2025-09-25T16:19:08.274101Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3462:5424], sender# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:23503 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0843A649-676D-4EA4-864C-62D6303F90B1 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: 
binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-09-25T16:19:08.278033Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:502: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3462:5424], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-09-25T16:19:08.278056Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:3462:5424], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-09-25T16:19:08.278154Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:19:08.297034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.297078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:08.297132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.297154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.297175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:08.297180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.297186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:08.297195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:08.297268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:08.302867Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.303152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.303169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:08.303197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:08.303203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.303210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:08.303214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.303221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:08.303259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:19:08.303271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.303278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:08.303284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:08.303347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:08.305189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:08.305215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3447:5410] TestWaitNotification: OK eventTxId 102 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> KqpOlapScheme::AddColumnErrors [GOOD] >> KqpOlapScheme::AddColumnFamily >> KqpScheme::CreateExternalDataSourceWithSa [GOOD] >> KqpScheme::CreateExternalTable >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TestKinesisHttpProxy::TestCounters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::StreamingQueriesWithResourcePools [GOOD] Test command err: Trying to start YDB, gRPC: 22896, MsgBus: 10407 
2025-09-25T16:18:52.730500Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062063803132970:2080];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.732619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:52.734176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d37/r3tmp/tmpHLAVsU/pdisk_1.dat 2025-09-25T16:18:52.755001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.755032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.757160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:52.762063Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22896, node 1 2025-09-25T16:18:52.797083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.797097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.797099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.797147Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10407 2025-09-25T16:18:52.840865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:52.863626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.874367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.903253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.932839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.949956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:53.253377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062068098101860:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.253416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.253552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062068098101870:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.253557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.339991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.351024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.362189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.377833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.390591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.404856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.421814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.440955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.457989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062068098102732:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.458017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.458088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062068098102737:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.458098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062068098102738:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.458127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.459005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB c ... roposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:07.034877Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:07.038116Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:07.043257Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.140562Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:07.232247Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.344367Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:07.581932Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.582371Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.582673Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.808037Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=YWUzMDQwOTQtODQ1ZjFmNmYtMzVhNjM1NTYtOWY5YTZmOWM=, ActorId: [7:7554062128115120625:2716], ActorState: ExecuteState, TraceId: streaming-query-/Root/MyFolder/MyStreamingQuery-51cc8cd4-b9193a0c-f14a9c0-8f5c76d7-72057594046644480-26, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool my_pool 2025-09-25T16:19:08.362616Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TStartStreamingQueryTableActor] OwnerId: [7:7554062128115120287:4096] ActorId: [7:7554062128115120332:4115] QueryPath: /Root/MyFolder/MyStreamingQuery. Query compilation / planing [7:7554062132410088142:4370] failed PRECONDITION_FAILED, issues: [ {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } {
: Error: Query failed during adding/waiting in workload pool my_pool } ] 2025-09-25T16:19:08.381817Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TStartStreamingQueryTableActor] OwnerId: [7:7554062128115120287:4096] ActorId: [7:7554062128115120332:4115] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } 2025-09-25T16:19:08.381871Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TSyncStreamingQueryTableActor] OwnerId: [7:7554062128115120199:4048] ActorId: [7:7554062128115120287:4096] QueryPath: /Root/MyFolder/MyStreamingQuery. Start streaming query [7:7554062128115120332:4115] failed PRECONDITION_FAILED, issues: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } 2025-09-25T16:19:08.397835Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TSyncStreamingQueryTableActor] OwnerId: [7:7554062128115120199:4048] ActorId: [7:7554062128115120287:4096] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } 2025-09-25T16:19:08.397892Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TCreateStreamingQueryActor] OwnerId: [0:0:0] ActorId: [7:7554062128115120199:4048] QueryPath: /Root/MyFolder/MyStreamingQuery. Streaming query initialization [7:7554062128115120287:4096] failed PRECONDITION_FAILED, issues: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } 2025-09-25T16:19:08.519790Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TCreateStreamingQueryActor] OwnerId: [0:0:0] ActorId: [7:7554062128115120199:4048] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Streaming query initialization failed subissue: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } } 2025-09-25T16:19:08.519961Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=ZjUwMDQ4ZGEtZGJkMTUwNmYtNDM5NDExMTgtZDhiMmIyMzg=, ActorId: [7:7554062128115119782:2548], ActorState: ExecuteState, TraceId: 01k60tqp467wtzc0kc9xa0apdk, Create QueryResponse for error on request, msg: 2025-09-25T16:19:08.543817Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:08.545199Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:08.811546Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=YWEwNWNlYmYtN2EwNTIxODAtN2IyMTgzMTMtNDI2MGZiYWI=, ActorId: [7:7554062132410088675:2900], ActorState: ExecuteState, TraceId: streaming-query-/Root/MyFolder/OtherQuery-9fde2fda-63d5e4a4-c3b35969-bf007855-72057594046644480-30, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool my_pool 2025-09-25T16:19:08.919857Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TStartStreamingQueryTableActor] OwnerId: [7:7554062132410088533:4540] ActorId: [7:7554062132410088566:4553] QueryPath: /Root/MyFolder/OtherQuery. Query compilation / planing [7:7554062132410088769:4617] failed PRECONDITION_FAILED, issues: [ {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } {
: Error: Query failed during adding/waiting in workload pool my_pool } ] 2025-09-25T16:19:08.933158Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TStartStreamingQueryTableActor] OwnerId: [7:7554062132410088533:4540] ActorId: [7:7554062132410088566:4553] QueryPath: /Root/MyFolder/OtherQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } 2025-09-25T16:19:08.933205Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TSyncStreamingQueryTableActor] OwnerId: [7:7554062132410088481:4510] ActorId: [7:7554062132410088533:4540] QueryPath: /Root/MyFolder/OtherQuery. Start streaming query [7:7554062132410088566:4553] failed PRECONDITION_FAILED, issues: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } 2025-09-25T16:19:08.951059Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TSyncStreamingQueryTableActor] OwnerId: [7:7554062132410088481:4510] ActorId: [7:7554062132410088533:4540] QueryPath: /Root/MyFolder/OtherQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } 2025-09-25T16:19:08.951132Z node 7 :KQP_PROXY WARN: queries.cpp:398: [StreamingQueries] [TAlterStreamingQueryActor] OwnerId: [0:0:0] ActorId: [7:7554062132410088481:4510] QueryPath: /Root/MyFolder/OtherQuery. Streaming query alter [7:7554062132410088533:4540] failed PRECONDITION_FAILED, issues: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } 2025-09-25T16:19:08.962253Z node 7 :KQP_PROXY WARN: queries.cpp:413: [StreamingQueries] [TAlterStreamingQueryActor] OwnerId: [0:0:0] ActorId: [7:7554062132410088481:4510] QueryPath: /Root/MyFolder/OtherQuery. Failed PRECONDITION_FAILED, with issues: {
: Error: Streaming query alter failed subissue: {
: Error: Start streaming query failed subissue: {
: Error: Query compilation / planing failed subissue: {
: Error: Resource pool my_pool was disabled due to zero concurrent query limit } subissue: {
: Error: Query failed during adding/waiting in workload pool my_pool } } } } 2025-09-25T16:19:08.962459Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=7&id=ZDFmMTkyOGUtNWIxNjRmYjAtYTk0NmNjOWYtMWZkNTk2YWQ=, ActorId: [7:7554062132410088475:2860], ActorState: ExecuteState, TraceId: 01k60tqqqpbf0n0dcbh7cnpckv, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:49.157037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:49.157068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.157075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:49.157081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:49.157089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:49.157094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:49.157111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:49.157125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:49.157247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:49.157323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:49.173417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:49.173442Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.178624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:49.178700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:49.178739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:49.180528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:49.180602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:49.180719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.180791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:49.181265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.181312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:49.181585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.181593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:49.181609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:49.181614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:49.181620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:49.181644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.183276Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:49.205062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:49.205178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.205248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:49.205256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:49.205299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:49.205314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:49.206169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.206216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:49.206284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.206295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:49.206302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:49.206309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:49.206891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.206921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:49.206933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:49.207383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.207395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:49.207402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.207410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:49.208190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:49.212679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:49.212730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:49.213004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:49.213040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:49.213049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.213126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:49.213136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:49.213177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:49.213191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:49.213812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:49.213823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
:NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13111 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 182E88FE-8D98-4829-A8D9-F05609B3CE40 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-09-25T16:19:08.510694Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:593: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3462:5424], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-09-25T16:19:08.510750Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3461:5423] 2025-09-25T16:19:08.510766Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3462:5424], sender# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13111 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 783A40EC-A582-46F6-A612-30BC310DA3FE amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-09-25T16:19:08.511429Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:593: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3462:5424], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-09-25T16:19:08.511441Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:3462:5424], success# 1, error# , multipart# 1, uploadId# 1 2025-09-25T16:19:08.513617Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:527: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3462:5424], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13111 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
1A6DED40-FA67-40E3-B5CD-0B5B2D55EC42 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-09-25T16:19:08.515692Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:624: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3462:5424], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-09-25T16:19:08.515796Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:19:08.523103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.523138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:08.523179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.523195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:08.523215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:08.523220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.523228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:08.523237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:08.523318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:08.524856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.525051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.525067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:08.525091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:08.525097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.525103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:08.525111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.525117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:08.525149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:19:08.525159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:08.525165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:08.525186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:08.525237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:08.526393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:08.526410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3447:5410] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 21124, MsgBus: 10308 2025-09-25T16:18:51.308723Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062060263986021:2188];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:51.308771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001e73/r3tmp/tmpTeZhW7/pdisk_1.dat 2025-09-25T16:18:51.389555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:51.392904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7554062060263985869:2081] 1758817131305926 != 1758817131305929 2025-09-25T16:18:51.393288Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21124, node 1 2025-09-25T16:18:51.413130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:51.413193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:51.414437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:51.425042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:51.425058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:51.425060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:51.425102Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10308 TClient is connected to server localhost:10308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:51.518225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:51.521483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:51.597436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-09-25T16:18:51.808957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062060263986548:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.808993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.809192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062060263986558:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.809199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.864305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-09-25T16:18:51.875574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:51.875634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:51.875694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:51.875718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:51.875744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:51.875772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:51.875796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:51.875821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:51.875846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:51.875869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:51.875901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:51.875927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:18:51.875954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062060263986611:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:18:51.877029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:18:51.877047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:18:51.877059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:18:51.877064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:18:51.877081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:18:51.877085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:18:51.877095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:18:51.877099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:18:51.877106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:18:51.877111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSch ... 
075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:19:09.074007Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:19:09.074031Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:19:09.074055Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:19:09.074080Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:19:09.074100Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:19:09.074124Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:19:09.074147Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:19:09.074168Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:19:09.074192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:19:09.079528Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:19:09.079548Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:19:09.079566Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:19:09.079573Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:19:09.079600Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:19:09.079607Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:19:09.079620Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:19:09.079627Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:19:09.079637Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:19:09.079644Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:19:09.079651Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:19:09.079659Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:19:09.079705Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:19:09.079714Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:19:09.079731Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:19:09.079738Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:19:09.079745Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:19:09.079752Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:19:09.079760Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:19:09.079767Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:19:09.079782Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:19:09.079789Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-09-25T16:19:09.079801Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-09-25T16:19:09.079806Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-09-25T16:19:09.082060Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[4:7554062134955161316:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=124078304876656;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1758817149081;max=18446744073709551615;plan=0;src=[4:7554062130660193620:2151];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:09.086273Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:09.086298Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:09.086301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:09.102949Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554062134955161384:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.102972Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.103269Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554062134955161386:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.103280Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.110144Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554062134955161391:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.110215Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.111951Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7554062134955161395:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:09.111975Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> KqpScheme::AlterCacheModeInColumnFamilyFeatureDisabled-UseQueryService [GOOD] >> KqpScheme::AlterDatabaseChangeOwner+EnableAlterDatabase >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TestYmqHttpProxy::TestDeleteMessageBatch >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> KqpScheme::DropAsyncReplicationCascade [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> KqpScheme::CreateAlterUserLoginNoLogin [GOOD] >> KqpScheme::ChangefeedSchemaChanges-UseQueryService >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpScheme::CreateTableWithPartitionAtKeysComplex [GOOD] >> KqpScheme::CreateTableWithFamiliesRegular+UseQueryService >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> KqpAcl::AclTemporaryInterruptInheritance-IsOlap [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:18:50.040068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:50.040098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:50.040105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:50.040111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:18:50.040119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:50.040123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:50.040134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:50.040150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:50.040281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:50.040367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:50.057953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:18:50.057987Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:50.062941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:50.063062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:50.063123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:50.064747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:50.064838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:50.064960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:50.065052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:50.065537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:50.065581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:50.065887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:50.065898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:50.065923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:18:50.065931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:50.065938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:50.065977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.067449Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:18:50.091617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:18:50.091745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.091817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:50.091826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:50.091872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:50.091888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:50.092857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:50.092909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:50.093015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.093027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:50.093034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:50.093040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:50.093600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.093613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:50.093622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:50.094144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.094157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:50.094164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:50.094173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:18:50.095013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:50.095505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:50.095552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:50.095788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:50.095818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:18:50.095826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:50.095896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:18:50.095904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:50.095945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:18:50.095959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:18:50.096458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:50.096468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
[1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8547 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E6042256-E1B3-4377-A02E-77322A912278 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-09-25T16:19:09.646367Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:593: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3462:5424], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-09-25T16:19:09.646402Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3461:5423] 2025-09-25T16:19:09.646416Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:460: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3462:5424], sender# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8547 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3303E8BE-8026-4A22-B4C1-91787EE29736 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-09-25T16:19:09.647063Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:593: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3462:5424], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-09-25T16:19:09.647074Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:705: [Export] [s3] Finish: self# [1:3462:5424], success# 1, error# , multipart# 1, uploadId# 1 2025-09-25T16:19:09.651369Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:527: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3462:5424], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8547 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
BC66E315-C07A-4CDA-A92B-31F4A8120595 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-09-25T16:19:09.654147Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:624: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3462:5424], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-09-25T16:19:09.654267Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3461:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-09-25T16:19:09.658572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:09.658603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:09.658641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:09.658656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 315 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-09-25T16:19:09.658678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:09.658682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:09.658688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:09.658697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:09.658766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:09.661452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:09.661702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:09.661721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:09.661747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:09.661754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:09.661761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:09.661765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:09.661771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:09.661808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2321] message: TxId: 102 2025-09-25T16:19:09.661821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:09.661828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:09.661833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:09.661878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:09.663134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:09.663152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3447:5410] TestWaitNotification: OK eventTxId 102 |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> KqpScheme::CreateAsyncReplicationWithIamAuth-UseQueryService [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> KqpScheme::CreateAsyncReplicationWithCaCert+UseQueryService >> THiveTest::TestLockTabletExecutionGoodUnlock >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> KqpOlapScheme::AddColumnFamily [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TestKinesisHttpProxy::ListShardsTimestamp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-09-25T16:19:09.625780Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:09.626764Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:09.626851Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:09.626868Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:09.626884Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:09.627115Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:261:2256], now have 1 active actors on pipe 2025-09-25T16:19:09.627155Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:09.630738Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:09.630791Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:09.631070Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:09.631116Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:09.631269Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:09.631336Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2262] 2025-09-25T16:19:09.631965Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:09.631976Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:09.631996Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2262] 2025-09-25T16:19:09.632005Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:09.632017Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:09.632023Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:09.632039Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:09.632044Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:09.632122Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:09.632263Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:272:2264], now have 1 active actors on pipe 2025-09-25T16:19:09.658951Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:09.660026Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:09.660119Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:09.660134Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:09.660145Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:09.660333Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:399:2360], now have 1 active actors on pipe 2025-09-25T16:19:09.660351Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:09.661921Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:09.661956Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:09.662182Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:09.662214Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:09.662305Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:09.662348Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2366] 2025-09-25T16:19:09.662905Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:09.662919Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:09.662928Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2366] 2025-09-25T16:19:09.662937Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:09.662949Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:09.662955Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:09.662960Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:09.662964Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:09.663068Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:09.663203Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:410:2368], now have 1 active actors on pipe 2025-09-25T16:19:09.663485Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:416:2371], now have 1 active actors on pipe 2025-09-25T16:19:09.663564Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [2:416:2371] destroyed 2025-09-25T16:19:09.663572Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:418:2372], now have 1 active actors on pipe 2025-09-25T16:19:09.663664Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928139] server disconnected, pipe [2:418:2372] destroyed 2025-09-25T16:19:10.008265Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.009273Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.009358Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:10.009373Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.009383Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:10.009580Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:260:2255], now have 1 active actors on pipe 2025-09-25T16:19:10.009589Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:10.010091Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.010120Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.010248Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 3 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.010273Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.010341Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.010378Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:268:2261] 2025-09-25T16:19:10.011081Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing ... ntInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:10.030967Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.031055Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.031101Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [3:468:2412] 2025-09-25T16:19:10.031678Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.031686Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-09-25T16:19:10.031693Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:468:2412] 2025-09-25T16:19:10.031702Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.031711Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. Count 0 2025-09-25T16:19:10.031717Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:10.031723Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:10.031730Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928138][Partition][1][StateIdle] Try persist 2025-09-25T16:19:10.031782Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928138][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:10.031905Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:471:2414], now have 1 active actors on pipe 2025-09-25T16:19:10.035880Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.036638Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.036710Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:10.036722Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.036734Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:10.036925Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:520:2451], now have 1 active actors on pipe 2025-09-25T16:19:10.036951Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:10.037564Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:10.037594Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.037777Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 6 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:10.037805Z node 3 :PERSQUEUE 
DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.037875Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.037915Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:528:2457] 2025-09-25T16:19:10.038521Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.038530Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:10.038537Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:528:2457] 2025-09-25T16:19:10.038545Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.038557Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:10.038562Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:10.038569Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:10.038574Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:10.038638Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:10.038764Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:531:2459], now have 1 active actors on pipe 2025-09-25T16:19:10.039021Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:537:2462], now have 1 active actors on pipe 2025-09-25T16:19:10.039056Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:538:2463], now have 1 active actors on pipe 2025-09-25T16:19:10.039075Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:539:2463], now have 1 active actors on pipe 2025-09-25T16:19:10.049475Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:544:2467], now have 1 active actors on pipe 2025-09-25T16:19:10.062757Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.063565Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.063759Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.063768Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:10.063789Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.063844Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.063876Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:600:2512] 2025-09-25T16:19:10.064425Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:10.064663Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:10.064702Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:10.064745Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:10.064774Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:10.064779Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:10.064784Z node 3 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:10.064787Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.064793Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:10.064800Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:600:2512] 2025-09-25T16:19:10.064808Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.064819Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:10.064839Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:10.064844Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:10.064849Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:10.064900Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:10.065077Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [3:538:2463] destroyed 2025-09-25T16:19:10.065126Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [3:537:2462] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> KqpScheme::CreateExternalTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> KqpScheme::AlterDatabaseChangeOwner+EnableAlterDatabase [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> KqpScheme::CreateExternalTableCheckPrimaryKey >> KqpScheme::AlterDatabaseChangeOwner-EnableAlterDatabase >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> KqpScheme::ChangefeedSchemaChanges-UseQueryService [GOOD] >> KqpScheme::ChangefeedTopicPartitions >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } |81.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropAsyncReplicationCascade [GOOD] Test command err: Trying to start YDB, gRPC: 64874, MsgBus: 6812 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d0d/r3tmp/tmp3uQ7ow/pdisk_1.dat 2025-09-25T16:18:55.278488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:55.278514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:55.279570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:55.289426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:55.289469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:18:55.292495Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:55.296793Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062077820312882:2081] 1758817135208801 != 1758817135208804 TServer::EnableGrpc on GrpcPort 64874, node 1 2025-09-25T16:18:55.313623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:55.313637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:55.313639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:55.313679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6812 TClient is connected to server localhost:6812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:18:55.440312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:55.443619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:55.449729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:55.450864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:55.489990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:55.533322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:55.565869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:55.818733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062077820314525:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.818784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.818962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062077820314535:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.818978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.894376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.906303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.920065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.936749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.948712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.964771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.975845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.988190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:56.018011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062082115282693:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:56.018031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:56.018137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062082115282699:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:56.018148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062082115282698:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:56.018153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:56.019050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.c ... e 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.403029Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:07.605389Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062127491262871:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.605415Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.605631Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062127491262880:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.605640Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.621492Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.639150Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.650816Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.667892Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.682443Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.698987Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.715823Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.729934Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.754272Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7554062127491263741:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.754305Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.754395Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062127491263746:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.754401Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062127491263747:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.754419Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.755439Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:07.760391Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:19:07.760508Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7554062127491263750:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:07.841880Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7554062127491263802:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:08.153811Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:08.383628Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.421376Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:487) 2025-09-25T16:19:08.509620Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:313) 2025-09-25T16:19:08.537511Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.589041Z node 7 :PQ_READ_PROXY ERROR: grpc_pq_schema.cpp:148: new Describe topic request 2025-09-25T16:19:09.503200Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropReplicationCascade, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp:359) 2025-09-25T16:19:09.513542Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710680:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-09-25T16:19:09.518161Z node 7 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037929 not found 2025-09-25T16:19:09.518172Z node 7 :HIVE 
WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037930 not found 2025-09-25T16:19:09.519936Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,42) wasn't found 2025-09-25T16:19:09.519950Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,43) wasn't found 2025-09-25T16:19:09.548373Z node 7 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037928 not found 2025-09-25T16:19:09.548387Z node 7 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037931 not found |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> KqpScheme::CreateTableWithFamiliesRegular+UseQueryService [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] >> KqpScheme::CreateAsyncReplicationWithCaCert+UseQueryService [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex [GOOD] >> KqpScheme::AlterTableAlterIndex+UseQueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpAcl::AclTemporaryInterruptInheritance-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 17777, MsgBus: 22177 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001cb4/r3tmp/tmpUzGJ9f/pdisk_1.dat 2025-09-25T16:18:57.692097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:57.692125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:57.692563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:57.692870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2025-09-25T16:18:57.692943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 17777, node 1 2025-09-25T16:18:57.737313Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:57.740928Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062085222491595:2081] 1758817137604325 != 1758817137604328 2025-09-25T16:18:57.757058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:57.757071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:57.757074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:57.757118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22177 TClient is connected to server localhost:22177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:57.835814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:57.838869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:57.845803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:57.870789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:57.913110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:57.932922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:57.969426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:58.133378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062089517460537:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.133416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.133505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062089517460547:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.133510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.228618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.238842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.251427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.262994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.277734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.291338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.311920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.325859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:58.349020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062089517461409:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.349056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.349333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062089517461414:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.349347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062089517461415:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.349409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:58.350188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operatio ... fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:19:08.555515Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:19:08.555538Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:19:08.555565Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:19:08.555591Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:19:08.555614Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:19:08.555638Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;self_id=[9:7554062131238371425:2367];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:19:08.560256Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;self_id=[9:7554062131238371415:2357];tablet_id=72075186224037917;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:19:08.560298Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;self_id=[9:7554062131238371415:2357];tablet_id=72075186224037917;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; Trying to start YDB, gRPC: 19113, MsgBus: 20422 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001cb4/r3tmp/tmpKIYhVH/pdisk_1.dat 2025-09-25T16:19:09.575417Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:09.592957Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:19:09.608910Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [10:7554062136044248640:2081] 1758817149554175 != 1758817149554178 2025-09-25T16:19:09.609141Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19113, node 10 
2025-09-25T16:19:09.625066Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:09.625083Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:09.625085Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:09.625144Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20422 2025-09-25T16:19:09.672686Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:09.672725Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:09.677244Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:09.729445Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:09.769437Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:09.774359Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:10.485431Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062140339216630:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:10.485475Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:10.485670Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062140339216642:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:10.485680Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062140339216643:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:10.485757Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:10.486738Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:10.490407Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554062140339216646:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:19:10.564930Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:10.580278Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554062140339216697:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:10.599176Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:10.633398Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:10.643014Z node 10 :TX_PROXY ERROR: describe.cpp:393: Access denied for root@builtin with access DescribeSchema to path Root/.tmp/sessions 2025-09-25T16:19:10.649375Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:10.674162Z node 10 :TX_PROXY ERROR: describe.cpp:393: Access denied for root@builtin with access DescribeSchema to path Root/.tmp/sessions/ODQzMzg5NzUtODVhMTUzNDEtYmNkZGFjYjMtNzg3ZjdjYjQ= 2025-09-25T16:19:10.701571Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-09-25T16:19:10.706940Z node 10 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-09-25T16:19:10.707495Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> TestKinesisHttpProxy::TestCounters [GOOD] >> KqpScheme::AlterDatabaseChangeOwner-EnableAlterDatabase [GOOD] >> 
TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> KqpScheme::CreateExternalTableCheckPrimaryKey [GOOD] >> KqpScheme::CreateExternalTableValidation >> KqpScheme::DisableResourcePoolClassifiersOnServerless [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> KqpScheme::AlterTransfer [GOOD] >> KqpScheme::ChangefeedTopicPartitions [GOOD] >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> KqpScheme::AddChangefeed [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> KqpScheme::CreateDropTableMultipleTime [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> KqpScheme::CreateAsyncReplicationWithCaCert-UseQueryService >> TestYmqHttpProxy::TestSendMessageBatch >> TestKinesisHttpProxy::GoodRequestCreateStream >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> KqpScheme::AlterColumnTableTtl >> KqpScheme::AlterTableAlterIndex+UseQueryService [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> KqpScheme::AlterTableAlterIndex-UseQueryService >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> KqpScheme::CreateExternalTableValidation [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless >> THiveTest::TestLocalRegistrationInSharedHive >> KqpScheme::CreateTableWithFamiliesRegular-UseQueryService >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> KqpScheme::AlterTransfer_QueryService >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility >> KqpScheme::ChangefeedTopicAutoPartitioning >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> KqpOlapScheme::AddColumnFamilyWithCacheModeError >> KqpScheme::CreateAsyncReplicationWithCaCert-UseQueryService [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody >> KqpScheme::AlterColumnTableTtl [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] >> 
TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 24873, MsgBus: 14013 2025-09-25T16:18:54.524130Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062070726412445:2071];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:54.524151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d13/r3tmp/tmp7aq71f/pdisk_1.dat 2025-09-25T16:18:54.576349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:54.589085Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24873, node 1 2025-09-25T16:18:54.617042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:54.617055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:54.617057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:54.617101Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:54.633618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:54.633647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:54.634696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14013 TClient is connected to server localhost:14013 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:54.692211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:54.694859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:54.700665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:54.733716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:54.780542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:54.782740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:54.812361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:55.024179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062075021381352:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.024207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.024669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062075021381362:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.024683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.094001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.107012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.125406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.135191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.147164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.162964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.181247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.193301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:55.214598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062075021382223:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.214628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.214720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062075021382228:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.214731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062075021382229:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.214788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:55.215653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... s_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:08.122815Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:08.166299Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:08.401149Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062134394675402:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.401187Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.401446Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062134394675411:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.401459Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.416993Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.431510Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.446818Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.459217Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.497020Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.516321Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.539879Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.568295Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:08.595936Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[10:7554062134394676271:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.595965Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.596143Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062134394676276:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.596153Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7554062134394676277:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.596210Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:08.597197Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:08.602676Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-09-25T16:19:08.612950Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7554062134394676280:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:08.680419Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554062134394676332:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:08.722015Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:09.127078Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:09.294645Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:09.436404Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:09.578792Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:09.717392Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:09.851165Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:10.198976Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=10&id=OWUzMTE0YS1lYzZkZTBhMS03OWMyNDVhYS1lNWU4M2NmYw==, ActorId: [10:7554062138689644525:2684], ActorState: ExecuteState, TraceId: 01k60tqs5r4edd7yhwq7tx1bvv, Create QueryResponse for error on request, msg: 2025-09-25T16:19:10.210109Z node 
10 :KQP_GATEWAY WARN: query_actor.cpp:375: [TQueryBase] [TRanksCheckerActor] OwnerId: [10:7554062142984611850:4037], ActorId: [10:7554062142984611852:4039], TraceId: /Root, Finish with CANCELLED, Issues: [ {
: Error: Query execution is cancelled because session was requested to be closed. } {
: Error: Cancelling after 62ms during compilation } ], SessionId: ydb://session/3?node_id=10&id=OWUzMTE0YS1lYzZkZTBhMS03OWMyNDVhYS1lNWU4M2NmYw==, TxId: |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> KqpScheme::CreateTableWithFamiliesRegular-UseQueryService [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> KqpOlapScheme::AddColumnFamilyWithCacheModeError [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> KqpScheme::CreateTableWithDefaultFamily+UseQueryService >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> KqpScheme::CreateTableWithDefaultFamily+UseQueryService [GOOD] >> KqpScheme::CreateTableWithDefaultFamily-UseQueryService >> KqpScheme::CreateTableWithDefaultFamily-UseQueryService [GOOD] >> KqpScheme::CreateExternalTableWithSettings >> KqpScheme::CreateExternalTableWithSettings [GOOD] >> KqpScheme::CreateExternalTableWithUpperCaseSettings >> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } |81.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-09-25T16:19:12.217080Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.218453Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.218572Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:12.218589Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.218602Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:12.218848Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2025-09-25T16:19:12.218904Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.223169Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.223230Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.223580Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.223632Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.223822Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.223914Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2260] 2025-09-25T16:19:12.224685Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.224701Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:12.224724Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2260] 2025-09-25T16:19:12.224735Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.224749Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:12.224756Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.224773Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.224778Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.225055Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.225224Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:271:2262], now have 1 active actors on pipe 2025-09-25T16:19:12.238532Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.239449Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.239535Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:12.239549Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.239561Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:12.239730Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:400:2360], now have 1 active actors on pipe 2025-09-25T16:19:12.239755Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.240287Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:12.240316Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.240536Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:12.240561Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.240626Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.240666Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:408:2366] 2025-09-25T16:19:12.241318Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.241326Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:12.241334Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:408:2366] 2025-09-25T16:19:12.241342Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.241352Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:12.241357Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.241363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.241367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:12.241477Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:12.241587Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:411:2368], now have 1 active actors on pipe 2025-09-25T16:19:12.241806Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:417:2371], now have 1 active actors on pipe 2025-09-25T16:19:12.241889Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:419:2372], now have 1 active actors on pipe 2025-09-25T16:19:12.241969Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [2:417:2371] destroyed 2025-09-25T16:19:12.242033Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928139] server disconnected, pipe [2:419:2372] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |81.8%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> KqpScheme::CreateBackupCollectionDisabledByDefault >> KqpScheme::CreateBackupCollectionDisabledByDefault [GOOD] >> KqpScheme::CreateBackupCollection >> KqpScheme::CreateBackupCollection [GOOD] >> KqpScheme::AddChangefeedWhenDisabled >> KqpScheme::AddChangefeedWhenDisabled [GOOD] >> KqpScheme::AddChangefeedNegative >> TestKinesisHttpProxy::TestWrongRequest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> TestKinesisHttpProxy::ListShardsToken >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> 
test.py::test[aggregate-group_by_rollup_grouping--Results] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> KqpScheme::CreateAlterDropTableStore >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> KqpScheme::CleanupTemporaryTables >> KqpScheme::AlterTableAlterIndex-UseQueryService [GOOD] >> KqpScheme::AlterTableAddUniqIndexSqlFeatureOff >> KqpScheme::AlterTableAddUniqIndexSqlFeatureOff [GOOD] >> KqpScheme::AlterTableAddUniqIndexPublicApiFeatureOff >> KqpScheme::AlterTableAddUniqIndexPublicApiFeatureOff [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> KqpScheme::AlterColumnTableTiering >> KqpScheme::AlterColumnTableTiering [GOOD] >> KqpScheme::AlterAsyncReplication >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> KqpScheme::CreateDropTableViaApiMultipleTime >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TStorageBalanceTest::TestScenario2 [GOOD] >> KqpScheme::AlterTransfer_QueryService [GOOD] >> ForceDropWithReboots::ForceDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> YdbOlapStore::LogTsRangeDescending [GOOD] >> KqpScheme::AddChangefeedNegative [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, 
Marker# PQ1" ErrorCode: ERROR } >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> KqpScheme::CleanupTemporaryTables [GOOD] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> TestYmqHttpProxy::TestListQueueTags >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> KqpScheme::AlterAsyncReplication [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> KqpScheme::CreateDropTableViaApiMultipleTime [GOOD] >> KqpScheme::CreateDropColumnTable >> KqpScheme::CreateDropColumnTable [GOOD] >> KqpScheme::CreateDropColumnTableNegative >> KqpScheme::CreateDropColumnTableNegative [GOOD] >> KqpScheme::CreateExternalDataSource >> KqpScheme::CreateExternalDataSource [GOOD] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod >> KqpScheme::CreateExternalDataSourceValidationAuthMethod [GOOD] >> KqpScheme::CreateExternalDataSourceValidationLocation >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TStorageBalanceTest::TestScenario3 >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-09-25T16:19:14.308113Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:14.309157Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:14.309245Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:14.309259Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.309270Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:14.309441Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:261:2255], now have 1 active actors on pipe 2025-09-25T16:19:14.309470Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:14.311850Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:14.311887Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.312128Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions 
{ PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:14.312162Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:14.312258Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:14.312327Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2261] 2025-09-25T16:19:14.312800Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:14.312809Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:14.312848Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2261] 2025-09-25T16:19:14.312859Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:14.312870Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2025-09-25T16:19:14.312876Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:14.312892Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:14.312897Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:14.312964Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:14.313107Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:272:2263], now have 1 active actors on pipe 2025-09-25T16:19:14.326047Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:14.327473Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:14.327573Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-09-25T16:19:14.327589Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.327601Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928137] doesn't have tx writes info 2025-09-25T16:19:14.327809Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [2:402:2362], now have 1 active actors on pipe 2025-09-25T16:19:14.327825Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:14.328446Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:14.328475Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.328695Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928137] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:14.328730Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:14.328811Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:14.328867Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:410:2368] 2025-09-25T16:19:14.329311Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:14.329319Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-09-25T16:19:14.329324Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:410:2368] 2025-09-25T16:19:14.329330Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:14.329338Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:14.329343Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928137][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:14.329346Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928137][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:14.329349Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928137][Partition][0][StateIdle] Try persist 2025-09-25T16:19:14.329428Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928137][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:14.329531Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [2:413:2370], now have 1 active actors on pipe 2025-09-25T16:19:14.333741Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:14.335027Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:14.335137Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-09-25T16:19:14.335153Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.335164Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928138] doesn't have tx writes info 2025-09-25T16:19:14.335361Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [2:462:2407], now have 1 active actors on pipe 2025-09-25T16:19:14.335372Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:14.335913Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:14.335941Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.336087Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928138] Config applied version 3 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:14.336108Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:14.336178Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:14.336209Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:470:2413] 2025-09-25T16:19:14.336579Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:14.336585Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:14.336591Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:470:2413] 2025-09-25T16:19:14.336597Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:14.33660 ... artition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:14.578162Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:14.578208Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:14.578234Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:527:2456] 2025-09-25T16:19:14.578637Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:14.578643Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:14.578648Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:527:2456] 2025-09-25T16:19:14.578653Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:14.578660Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:14.578663Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:14.578666Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:14.578672Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:14.578713Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:14.578787Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:530:2458], now have 1 active actors on pipe 2025-09-25T16:19:14.578956Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:536:2461], now have 1 active actors on pipe 2025-09-25T16:19:14.578969Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:537:2462], now have 1 active actors on pipe 2025-09-25T16:19:14.579006Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:14.579047Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:14.579054Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:538:2462], now have 1 active actors on pipe 2025-09-25T16:19:14.579086Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:14.589443Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:546:2469], now have 1 active actors on pipe 2025-09-25T16:19:14.595470Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:14.596365Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:14.596607Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:14.596618Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:14.596641Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:14.596700Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:14.596737Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:602:2514] 2025-09-25T16:19:14.597438Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:14.597695Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:14.597742Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:14.597809Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:14.597840Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:14.597846Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:14.597851Z node 3 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:14.597855Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:14.597861Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:14.597870Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:602:2514] 2025-09-25T16:19:14.597880Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:14.597890Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:14.597896Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:14.597901Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:14.597906Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:14.597956Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:14.598136Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [3:537:2462] destroyed 2025-09-25T16:19:14.598195Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [3:536:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD] Test command err: Trying to 
start YDB, gRPC: 20999, MsgBus: 2418 2025-09-25T16:19:02.388320Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062105660750997:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:02.390514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c6e/r3tmp/tmpPLv2sT/pdisk_1.dat 2025-09-25T16:19:02.447139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:02.447555Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20999, node 1 2025-09-25T16:19:02.471300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:02.471314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:02.471327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:02.471374Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2418 2025-09-25T16:19:02.494681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:02.494712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:02.495799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:02.590477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:19:02.593407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:02.609534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:02.648242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:02.655803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:19:02.694081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:02.709671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.889148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062105660752592:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.889205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.892939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062105660752602:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.892965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.009078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.035685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.062350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.077565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.102883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.135274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.158538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.188516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.220933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062109955720761:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.220985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.221187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062109955720767:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.221187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062109955720766:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.221195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.222010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshar ... : failed to initialize from file: (empty maybe) 2025-09-25T16:19:16.085814Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14087 TClient is connected to server localhost:14087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:16.144091Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:16.152322Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:16.170195Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:16.195198Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:16.213844Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:16.366361Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:16.444119Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062164673381148:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.444145Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.444202Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062164673381157:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.444210Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.457208Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.465924Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.500263Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.547518Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.560924Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.579134Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.590078Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.603108Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.620060Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062164673382022:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.620090Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.620219Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062164673382027:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.620238Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062164673382028:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.620247Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.621317Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:16.629232Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062164673382031:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:16.681456Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062164673382083:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:16.977098Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-09-25T16:19:16.980251Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-09-25T16:18:50.105346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062054076332460:2210];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.105409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005984/r3tmp/tmpa3FxEg/pdisk_1.dat 2025-09-25T16:18:50.151004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:50.167206Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21950, node 1 2025-09-25T16:18:50.172275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062054076332274:2081] 1758817130101782 != 1758817130101785 2025-09-25T16:18:50.177642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:50.177652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:50.177655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:50.177701Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:50.209028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:50.209063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:50.211653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:50.212926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:16502 2025-09-25T16:18:50.239388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:50.240795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-09-25T16:18:50.267077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.285812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
waiting... 2025-09-25T16:18:50.303124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.311695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:50.343733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.352883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.365759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.385113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.400707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.416150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:50.426311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.534011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054076333673:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.534016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054076333681:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.534043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.534113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054076333688:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.534140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.535063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.538291Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062054076333687:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.633009Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062054076333740:2865] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:18:50.694 ... bf7e6d806a0b8062135ae945eca30bf" MessageId: "8b06a0a2-f98aeb59-1403cb7f-ad67f17f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-09-25T16:19:16.226758Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7554062167050426492:3509]. Found: 1 2025-09-25T16:19:16.226795Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse SendMessageBatch { RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "453e797b-b9c5ef42-e318f69f-bd38e441" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "8b06a0a2-f98aeb59-1403cb7f-ad67f17f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-09-25T16:19:16.226825Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7554062167050426488:2487]: SendMessageBatch { RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "453e797b-b9c5ef42-e318f69f-bd38e441" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "8b06a0a2-f98aeb59-1403cb7f-ad67f17f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-09-25T16:19:16.226914Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [fe459be0-2ee94519-97d822c-ad7a800a] HandleResponse: { SendMessageBatch { RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "453e797b-b9c5ef42-e318f69f-bd38e441" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "8b06a0a2-f98aeb59-1403cb7f-ad67f17f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-09-25T16:19:16.226942Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [fe459be0-2ee94519-97d822c-ad7a800a] Sending reply from proxy actor: { SendMessageBatch { RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "453e797b-b9c5ef42-e318f69f-bd38e441" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "8b06a0a2-f98aeb59-1403cb7f-ad67f17f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "fe459be0-2ee94519-97d822c-ad7a800a" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-09-25T16:19:16.227041Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:378: http request [SendMessageBatch] requestId [fe459be0-2ee94519-97d822c-ad7a800a] Got succesfult GRPC response. 2025-09-25T16:19:16.227117Z node 7 :HTTP_PROXY INFO: http_req.cpp:1205: http request [SendMessageBatch] requestId [fe459be0-2ee94519-97d822c-ad7a800a] reply ok 2025-09-25T16:19:16.227166Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1265: http request [SendMessageBatch] requestId [fe459be0-2ee94519-97d822c-ad7a800a] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 618 SourceAddress: d87a:463c:fa70:0:c07a:463c:fa70:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-09-25T16:19:16.227215Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:44430) <- (200 , 465 bytes) 2025-09-25T16:19:16.227308Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:44430) connection closed 2025-09-25T16:19:16.227323Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"453e797b-b9c5ef42-e318f69f-bd38e441"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"8b06a0a2-f98aeb59-1403cb7f-ad67f17f"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2025-09-25T16:19:16.227329Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 0ms 2025-09-25T16:19:16.227345Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-09-25T16:19:16.227347Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-09-25T16:19:16.227365Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). Mode: COMPILE_AND_EXEC 2025-09-25T16:19:16.227386Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-09-25T16:19:16.227460Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TPQCachingProxyTest::TestPublishAndForget |81.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-09-25T16:19:12.891669Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.893797Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.893873Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:12.893886Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.893897Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:12.894124Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:261:2256], now have 1 active actors on pipe 2025-09-25T16:19:12.894166Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.897876Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.897940Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.898240Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.898299Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.898460Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.898521Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:269:2262] 2025-09-25T16:19:12.899222Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.899231Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:12.899249Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:269:2262] 2025-09-25T16:19:12.899259Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.899270Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:12.899463Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.899482Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.899487Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.899594Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.899734Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:272:2264], now have 1 active actors on pipe 2025-09-25T16:19:12.914848Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.916558Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.916680Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-09-25T16:19:12.916696Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.916707Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928137] doesn't have tx writes info 2025-09-25T16:19:12.916955Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [3:401:2362], now have 1 active actors on pipe 2025-09-25T16:19:12.916972Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.920249Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.920299Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.920539Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928137] Config applied version 2 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.920576Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.920698Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.920741Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:409:2368] 2025-09-25T16:19:12.921339Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.921356Z 
node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-09-25T16:19:12.921364Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:409:2368] 2025-09-25T16:19:12.921373Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.921386Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-09-25T16:19:12.921392Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928137][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.921397Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928137][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.921401Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928137][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.921499Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928137][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.921656Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [3:412:2370], now have 1 active actors on pipe 2025-09-25T16:19:12.945400Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.947135Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.947264Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-09-25T16:19:12.947280Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.947293Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928138] doesn't have tx writes info 2025-09-25T16:19:12.947521Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:461:2407], now have 1 active actors on pipe 2025-09-25T16:19:12.947549Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.948159Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:12.948191Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.948378Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928138] Config applied version 3 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 
Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:12.948408Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.948481Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.948525Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [3:469:2413] 2025-09-25T16:19:12.949167Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.949178Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:12.949183Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:469:2413] 2025-09-25T16:19:12.949189Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 p ... 37928138] Config applied version 7 actor [4:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:13.258490Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.258554Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.258594Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [4:468:2412] 2025-09-25T16:19:13.259496Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:13.259514Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:13.259522Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:468:2412] 2025-09-25T16:19:13.259533Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.259544Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:13.259550Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.259554Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.259563Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928138][Partition][1][StateIdle] Try persist 2025-09-25T16:19:13.259645Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928138][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:13.259760Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [4:471:2414], now have 1 active actors on pipe 2025-09-25T16:19:13.263566Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:13.264554Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:13.264626Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:13.264637Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.264646Z node 4 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:13.264805Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:520:2451], now have 1 active actors on pipe 2025-09-25T16:19:13.264845Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:13.265356Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:13.265384Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.265569Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 8 actor [4:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:13.265594Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.265664Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.265705Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:528:2457] 2025-09-25T16:19:13.266331Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:13.266344Z 
node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:13.266352Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:528:2457] 2025-09-25T16:19:13.266362Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.266372Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:13.266377Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.266385Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.266390Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:13.266469Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:13.266591Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:531:2459], now have 1 active actors on pipe 2025-09-25T16:19:13.266851Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [4:537:2462], now have 1 active actors on pipe 2025-09-25T16:19:13.266872Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [4:538:2463], now have 1 active actors on pipe 2025-09-25T16:19:13.266889Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:539:2463], now have 1 active actors on pipe 2025-09-25T16:19:13.280804Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:544:2467], now have 1 active actors on pipe 2025-09-25T16:19:13.288395Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:13.289186Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:13.289424Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.289436Z node 4 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:13.289458Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.289513Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.289549Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:600:2512] 2025-09-25T16:19:13.290295Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:13.290556Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:13.290637Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:13.290681Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:13.290708Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:13.290713Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:13.290719Z node 4 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:13.290724Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:13.290730Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:13.290737Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:600:2512] 2025-09-25T16:19:13.290747Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.290760Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:13.290765Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.290770Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.290775Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:13.290838Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:13.291003Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [4:538:2463] destroyed 2025-09-25T16:19:13.291017Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [4:537:2462] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-09-25T16:18:50.768855Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062054316530739:2063];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.768875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00594b/r3tmp/tmpKqz69Q/pdisk_1.dat 2025-09-25T16:18:50.824267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 27395, node 1 2025-09-25T16:18:50.838437Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:50.839143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062054316530717:2081] 1758817130768710 != 1758817130768713 2025-09-25T16:18:50.839609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:50.839619Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:50.839620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:50.839659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:50.861189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:50.872429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:50.872451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:50.873570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17263 2025-09-25T16:18:50.894284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... waiting... 2025-09-25T16:18:50.895704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:50.925677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:50.945079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.976127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:51.004010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.013932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.025304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.039085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.053253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:51.065567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:51.067815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.080843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.173218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058611499413:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.173235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058611499424:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.173241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.173342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058611499428:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.173353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.174043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:51.176188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-09-25T16:18:51.176302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062058611499427:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-09-25T16:18:51.269414Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062058611499480:2865] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], ... nd: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:16.457382Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:16.457392Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 5ms 2025-09-25T16:19:16.457446Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7554062166644264349:2433], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-09-25T16:19:16.457458Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062166644264348:2432], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.457473Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:16.457477Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-09-25T16:19:16.457498Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 6ms 2025-09-25T16:19:16.457509Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.457598Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:16.486670Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062166644264283:2425]: Pool not found 2025-09-25T16:19:16.487043Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-09-25T16:19:16.521357Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062166644264346:2431]: Pool not found 2025-09-25T16:19:16.522271Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-09-25T16:19:16.523239Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062166644264402:2445], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.523252Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7554062166644264403:2446], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-09-25T16:19:16.523259Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.525005Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062166644264406:2447], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.525034Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.563809Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062166644264400:2444]: Pool not found 2025-09-25T16:19:16.564241Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-09-25T16:19:17.448246Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:39692) incoming connection opened 2025-09-25T16:19:17.448332Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:39692) -> (POST /, 87 bytes) 2025-09-25T16:19:17.448424Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d8d5:c03a:2371:0:c0d5:c03a:2371:0] request [CreateStream] url [/] database [] requestId: c181623b-53824f4e-bfa6ff82-ac0ccc64 2025-09-25T16:19:17.448693Z node 8 :HTTP_PROXY WARN: http_req.cpp:947: http request [CreateStream] requestId [c181623b-53824f4e-bfa6ff82-ac0ccc64] got new request with incorrect json from [d8d5:c03a:2371:0:c0d5:c03a:2371:0] database '' 2025-09-25T16:19:17.448763Z node 8 :HTTP_PROXY INFO: http_req.cpp:1209: http request [CreateStream] requestId [c181623b-53824f4e-bfa6ff82-ac0ccc64] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-09-25T16:19:17.448808Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:39692) <- (400 InvalidArgumentException, 135 bytes) 2025-09-25T16:19:17.448861Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:289: (#37,[::1]:39692) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2025-09-25T16:19:17.448875Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:296: (#37,[::1]:39692) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: c181623b-53824f4e-bfa6ff82-ac0ccc64 Content-Type: application/x-amz-json-1.1 Content-Length: 135 2025-09-25T16:19:17.448953Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:39692) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableAddUniqIndexPublicApiFeatureOff [GOOD] Test command err: Trying to start YDB, gRPC: 65155, MsgBus: 26509 2025-09-25T16:19:02.909141Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062105897445380:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:02.909220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c3d/r3tmp/tmpirNixh/pdisk_1.dat 2025-09-25T16:19:02.913503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:19:02.968141Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:03.025155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:03.025189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 65155, node 1 2025-09-25T16:19:03.029152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:03.033896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:03.041081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:03.041093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:03.041095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:03.041143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26509 TClient is connected to server localhost:26509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:03.163611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:03.169325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:03.197364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:03.235664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:03.279820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:03.325164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:03.404909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062110192414254:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.404942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.405147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062110192414264:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.405157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.485502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.497705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.509523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.527050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.540046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.570299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.595082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.616305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.654492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062110192415122:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.654547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.654713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062110192415132:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.654739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062110192415133:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.654808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.655863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:1 ... net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:17.767406Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8480 TClient is connected to server localhost:8480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:17.816909Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:17.826402Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.845470Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:17.845498Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:17.846566Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:17.883684Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.902499Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
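Editor's note: the KQP_WORKLOAD_SERVICE warnings repeated above ("Resource pool default not found or you don't have access permissions", status NOT_FOUND) recur at every test bootstrap in this report; a few records later the pool is created via ESchemeOpCreateResourcePool and the doublecheck reports that the path /Root/.metadata/workload_manager/pools/default already exists, so these warnings are transient startup noise rather than test failures. Below is a minimal triage sketch, not part of the test harness: it assumes this stderr dump has been saved to a local file named ya_test_stderr.log (a hypothetical name) and counts occurrences of a few warning patterns copied verbatim from the records above.

# Minimal triage sketch (assumption: the stderr dump is saved as
# ya_test_stderr.log). The patterns are copied verbatim from the log
# records above; the list is illustrative, not exhaustive.
from collections import Counter

BENIGN_PATTERNS = [
    "Resource pool default not found or you don't have access permissions",
    "Failed to fetch pool default",
    "Operation part proposed ok, but propose itself is undo unsafe",
    "Failed to check script execution tables existence",
]

counts = Counter()
with open("ya_test_stderr.log", encoding="utf-8") as log:
    for line in log:
        for pattern in BENIGN_PATTERNS:
            if pattern in line:
                counts[pattern] += 1

for pattern, hits in counts.most_common():
    print(f"{hits:6d}  {pattern}")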
2025-09-25T16:19:17.913648Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.953051Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:18.079590Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062174712650672:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.079616Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.079696Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062174712650682:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.079707Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.089051Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.096896Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.106963Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.120810Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.135303Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.149032Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.162623Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.177459Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.193257Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062174712651544:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.193277Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.193302Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062174712651549:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.193317Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062174712651550:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.193326Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.193948Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:18.196435Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062174712651553:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:18.285484Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062174712651605:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:18.535447Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-09-25T16:18:49.743029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062050056745853:2241];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:49.743053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005993/r3tmp/tmpd974Kn/pdisk_1.dat 2025-09-25T16:18:49.794358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:49.814557Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.814755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062050056745650:2081] 1758817129739796 != 1758817129739799 TServer::EnableGrpc on GrpcPort 28256, node 1 2025-09-25T16:18:49.822336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:49.822349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:49.822351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:49.822389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:49.850979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:49.851012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:49.851980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:49.863382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:49.866230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:1511 waiting... 2025-09-25T16:18:49.916306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-09-25T16:18:49.917800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:49.918358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-09-25T16:18:49.924523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:49.949633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:49.978731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.979628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... waiting... 2025-09-25T16:18:50.018550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.031052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.041963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.053871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.067560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:50.079328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.102004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.205585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054351714344:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.205623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.205770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054351714356:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.205851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054351714357:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.205882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.207646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.211381Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062054351714360:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.294610Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062054351714411:2865] txid# 28147497671 ... { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:15.481232Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-09-25T16:19:15.481246Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 4ms 2025-09-25T16:19:15.481409Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:15.481532Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } 
} } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:15.481535Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 5ms 2025-09-25T16:19:15.481615Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:15.481619Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-09-25T16:19:15.481633Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 5ms 2025-09-25T16:19:15.481767Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:15.506105Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062160956911685:2428]: Pool not found 2025-09-25T16:19:15.506166Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-09-25T16:19:15.542564Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062160956911676:2426]: Pool not found 2025-09-25T16:19:15.542655Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-09-25T16:19:15.543482Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062160956911794:2445], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.543507Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.543507Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7554062160956911795:2446], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-09-25T16:19:15.543575Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062160956911798:2447], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.543608Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.590046Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7554062160956911792:2444]: Pool not found 2025-09-25T16:19:15.590144Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-09-25T16:19:16.476953Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:41610) incoming connection opened 2025-09-25T16:19:16.477010Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:41610) -> (POST /Root, 4 bytes) 2025-09-25T16:19:16.477127Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1853:d27a:e151:0:53:d27a:e151:0] request [CreateStream] url [/Root] database [/Root] requestId: 616fc51e-bc4f6773-8c0dff5a-1792617f 2025-09-25T16:19:16.477354Z node 8 :HTTP_PROXY INFO: http_req.cpp:1209: http request [CreateStream] requestId [616fc51e-bc4f6773-8c0dff5a-1792617f] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-09-25T16:19:16.477434Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:41610) <- (400 MissingParameter, 127 bytes) 2025-09-25T16:19:16.477447Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:289: (#37,[::1]:41610) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2025-09-25T16:19:16.477451Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:296: (#37,[::1]:41610) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 616fc51e-bc4f6773-8c0dff5a-1792617f Content-Type: application/x-amz-json-1.1 Content-Length: 127 2025-09-25T16:19:16.477514Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:41610) connection closed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-09-25T16:18:08.597247Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:08.601104Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:08.601185Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:08.601404Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:08.601483Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false 
PDisksWaitingToStart# false 2025-09-25T16:18:08.601677Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:08.601686Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:08.601824Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-09-25T16:18:08.601828Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:08.601847Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:08.601866Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:08.605763Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:08.605781Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:08.606303Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606335Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606357Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606390Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606421Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606454Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606479Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.606484Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:08.606494Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-09-25T16:18:08.606498Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-09-25T16:18:08.606505Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:08.606512Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:08.606676Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:08.610650Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:08.610691Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.611301Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:08.611370Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve 
leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.611433Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:08.611475Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.611484Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:08.613013Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:08.613078Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:08.613938Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:08.613964Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:08.613989Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:08.613995Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:08.614031Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:53:2093] 2025-09-25T16:18:08.614051Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:08.614214Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:08.614224Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.614234Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-09-25T16:18:08.614282Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:53:2093] 2025-09-25T16:18:08.614287Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:53:2093] 2025-09-25T16:18:08.614297Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:18:08.614304Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-09-25T16:18:08.614310Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-09-25T16:18:08.614319Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.614346Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-09-25T16:18:08.614350Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-09-25T16:18:08.614394Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 
72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.616337Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-09-25T16:18:08.616350Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-09-25T16:18:08.616374Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:08.616419Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:53:2093] 2025-09-25T16:18:08.616427Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.616438Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-09-25T16:18:08.616446Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:08.616680Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.616716Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.616833Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-09-25T16:18:08.616841Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 2146435075 Sender# [1:51:2092] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:08.616853Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.005346s 2025-09-25T16:18:08.616932Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:08.616999Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:538} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-09-25T16:18:08.617019Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-09-25T16:18:08.617053Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-09-25T16:18:08.617059Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 
2025-09-25T16:18:08.617067Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 26863924 ... DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-09-25T16:19:14.525769Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-09-25T16:19:14.525776Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-09-25T16:19:14.525787Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:19:14.525801Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:19:14.525811Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:19:14.525828Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2025-09-25T16:19:14.525842Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [38:201:2163] 2025-09-25T16:19:14.525849Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [38:201:2163] 2025-09-25T16:19:14.547293Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594046316545] client retry [38:438:2279] 2025-09-25T16:19:14.547321Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594046316545] lookup [38:438:2279] 2025-09-25T16:19:14.547351Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594046316545 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594046316545 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:19:14.547364Z node 38 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 38 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:19:14.547409Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594046316545 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:19:14.547439Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 0} 2025-09-25T16:19:14.547449Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 1} 2025-09-25T16:19:14.547456Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046316545 Cookie: 2} 2025-09-25T16:19:14.547466Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:19:14.547484Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
2025-09-25T16:19:14.547494Z node 38 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594046316545 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:19:14.547511Z node 38 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594046316545 leader: [0:0:0] followers: 0 2025-09-25T16:19:14.547523Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594046316545] forward result error, check reconnect [38:438:2279] 2025-09-25T16:19:14.547530Z node 38 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594046316545] schedule retry [38:438:2279] 2025-09-25T16:19:14.567972Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72075186224037888] client retry [39:565:2215] 2025-09-25T16:19:14.567996Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [39:565:2215] 2025-09-25T16:19:14.568020Z node 39 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [39:474:2157] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:19:14.568033Z node 39 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 39 selfDC 2 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:19:14.568083Z node 39 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:19:14.568183Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-09-25T16:19:14.568216Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-09-25T16:19:14.568226Z node 38 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-09-25T16:19:14.568254Z node 39 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [38:577:2316] CurrentLeaderTablet: [38:579:2317] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:19:14.568289Z node 39 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [38:577:2316] CurrentLeaderTablet: [38:579:2317] CurrentGeneration: 2 CurrentStep: 0} 2025-09-25T16:19:14.568307Z node 39 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [38:577:2316] followers: 0 2025-09-25T16:19:14.568318Z node 39 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 39 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [38:577:2316] 2025-09-25T16:19:14.568330Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 38 [39:565:2215] 2025-09-25T16:19:14.568355Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [39:565:2215] 2025-09-25T16:19:14.568361Z node 39 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [39:565:2215] 2025-09-25T16:19:14.568407Z node 38 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [39:565:2215] 2025-09-25T16:19:14.568472Z node 38 :HIVE TRACE: hive_impl.cpp:139: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([39:565:2215]) [38:656:2372] 2025-09-25T16:19:14.568488Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [39:565:2215] 2025-09-25T16:19:14.568492Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [39:565:2215] 2025-09-25T16:19:14.568495Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [39:565:2215] 2025-09-25T16:19:14.568509Z node 39 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [39:565:2215] 2025-09-25T16:19:14.568521Z node 39 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72075186224037888 Status=OK ClientId=[39:565:2215]} 2025-09-25T16:19:14.568541Z node 38 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [39:562:2215] EventType# 268959744 2025-09-25T16:19:14.568595Z node 38 :HIVE DEBUG: hive_impl.cpp:166: HIVE#72075186224037888 Handle TEvLocal::TEvRegisterNode from [39:562:2215] HiveId: 72075186224037888 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2025-09-25T16:19:14.568615Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-09-25T16:19:14.568621Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:19:14.568629Z node 38 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72075186224037888 THive::TTxRegisterNode(39)::Execute 2025-09-25T16:19:14.568661Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(39, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:14.568666Z node 38 :HIVE DEBUG: hive_impl.cpp:386: HIVE#72075186224037888 ProcessWaitQueue (0) 2025-09-25T16:19:14.568670Z node 38 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72075186224037888 ProcessBootQueue (0) 2025-09-25T16:19:14.568672Z node 38 :HIVE TRACE: hive_impl.cpp:369: HIVE#72075186224037888 ProcessBootQueue - sending 2025-09-25T16:19:14.568676Z node 38 :HIVE DEBUG: hive_impl.cpp:386: HIVE#72075186224037888 ProcessWaitQueue (0) 2025-09-25T16:19:14.568678Z node 38 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72075186224037888 ProcessBootQueue (0) 2025-09-25T16:19:14.568687Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(39, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:14.568700Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{7, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-09-25T16:19:14.568706Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:3} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:19:14.568747Z node 38 :HIVE TRACE: hive_impl.cpp:353: HIVE#72075186224037888 ProcessBootQueue - executing 2025-09-25T16:19:14.568754Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, 
NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-09-25T16:19:14.568757Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:19:14.568760Z node 38 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72075186224037888 THive::TTxProcessBootQueue()::Execute 2025-09-25T16:19:14.568765Z node 38 :HIVE DEBUG: hive_impl.cpp:224: HIVE#72075186224037888 ProcessBootQueue: 0 nodes connected out of 0 2025-09-25T16:19:14.568770Z node 38 :HIVE DEBUG: hive_impl.cpp:241: HIVE#72075186224037888 ProcessBootQueue - waiting until 586524-01-19T08:01:49.551615Z because of warmup, now: 1970-01-01T00:00:00.243648Z 2025-09-25T16:19:14.568776Z node 38 :HIVE DEBUG: hive_impl.cpp:378: HIVE#72075186224037888 PostponeProcessBootQueue (18446744073709.307967s) 2025-09-25T16:19:14.568782Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-09-25T16:19:14.568786Z node 38 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:4} Tx{5, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:19:14.568814Z node 38 :HIVE DEBUG: hive_impl.cpp:838: HIVE#72075186224037888 TEvInterconnect::TEvNodeInfo NodeId 39 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" >> TPQCachingProxyTest::TestPublishAndForget [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-09-25T16:18:49.539510Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062050172800824:2137];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:49.539602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00599c/r3tmp/tmpySYVIn/pdisk_1.dat 2025-09-25T16:18:49.627658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:49.643014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:49.643044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:49.643710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-09-25T16:18:49.644543Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.644983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062050172800722:2081] 1758817129538706 != 1758817129538709 TServer::EnableGrpc on GrpcPort 13498, node 1 2025-09-25T16:18:49.650711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:49.650720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:49.650721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:49.650754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:49.700392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:15058 2025-09-25T16:18:49.756202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:49.758340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:49.758857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 
2025-09-25T16:18:49.765248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-09-25T16:18:49.804325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:49.822697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.829619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:49.858554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:49.900260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:49.912018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:49.928646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.944998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:49.972643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:49.988444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.000057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.059603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054467769423:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.059624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054467769434:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.059627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.059714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054467769437:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.059726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.060322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.062262Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062054467769438:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.148165Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062054467769490:2866] txid# 281474976 ... :1758817157,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-09-25T16:19:16.581233Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:40292) <- (200 , 1672 bytes) 2025-09-25T16:19:16.581329Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:40292) connection closed 2025-09-25T16:19:16.581903Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#40,[::1]:40300) incoming connection opened 2025-09-25T16:19:16.581933Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#40,[::1]:40300) -> (POST /Root, 30 bytes) 2025-09-25T16:19:16.581989Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18ed:e37b:8513:0:ed:e37b:8513:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: c89e7172-10bc16d0-2bd158c3-1367e9d1 2025-09-25T16:19:16.582098Z node 8 :HTTP_PROXY INFO: http_req.cpp:958: http request [DescribeStreamSummary] requestId [c89e7172-10bc16d0-2bd158c3-1367e9d1] got new request from [18ed:e37b:8513:0:ed:e37b:8513:0] database '/Root' stream 'testtopic' 2025-09-25T16:19:16.582448Z node 8 :HTTP_PROXY DEBUG: http_req.cpp:1498: http request [DescribeStreamSummary] requestId [c89e7172-10bc16d0-2bd158c3-1367e9d1] [auth] Authorized successfully 2025-09-25T16:19:16.582478Z node 8 :HTTP_PROXY INFO: http_req.cpp:677: http request [DescribeStreamSummary] requestId [c89e7172-10bc16d0-2bd158c3-1367e9d1] sending grpc request to '' database: '/Root' iam token size: 0 2025-09-25T16:19:16.582897Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [DescribeStreamSummary] requestId [c89e7172-10bc16d0-2bd158c3-1367e9d1] reply ok 2025-09-25T16:19:16.582965Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#40,[::1]:40300) <- (200 , 239 bytes) 2025-09-25T16:19:16.583002Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#40,[::1]:40300) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1758817.156,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1758817.156,"StreamName":"testtopic"}} 2025-09-25T16:19:16.584151Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:40312) incoming connection opened 2025-09-25T16:19:16.584176Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:40312) -> (POST /Root, 30 bytes) 2025-09-25T16:19:16.584222Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d891:7579:8513:0:c091:7579:8513:0] request [DescribeStream] url [/Root] database [/Root] requestId: 53df9788-4e94b8ca-839b9e30-e85a2455 2025-09-25T16:19:16.584303Z node 8 :HTTP_PROXY INFO: http_req.cpp:958: http request [DescribeStream] requestId [53df9788-4e94b8ca-839b9e30-e85a2455] got new request from [d891:7579:8513:0:c091:7579:8513:0] database '/Root' stream 'testtopic' 2025-09-25T16:19:16.584972Z node 8 :HTTP_PROXY DEBUG: http_req.cpp:1498: http request [DescribeStream] requestId [53df9788-4e94b8ca-839b9e30-e85a2455] [auth] Authorized successfully 2025-09-25T16:19:16.584999Z node 8 :HTTP_PROXY INFO: http_req.cpp:677: http request [DescribeStream] requestId 
[53df9788-4e94b8ca-839b9e30-e85a2455] sending grpc request to '' database: '/Root' iam token size: 0 2025-09-25T16:19:16.585572Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037909] server connected, pipe [8:7554062165142625630:2523], now have 1 active actors on pipe 2025-09-25T16:19:16.585607Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037908] server connected, pipe [8:7554062165142625629:2522], now have 1 active actors on pipe 2025-09-25T16:19:16.585624Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037910] server connected, pipe [8:7554062165142625631:2524], now have 1 active actors on pipe 2025-09-25T16:19:16.585697Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037911] server connected, pipe [8:7554062165142625632:2525], now have 1 active actors on pipe 2025-09-25T16:19:16.585724Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037907] server connected, pipe [8:7554062165142625628:2521], now have 1 active actors on pipe 2025-09-25T16:19:16.586264Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [DescribeStream] requestId [53df9788-4e94b8ca-839b9e30-e85a2455] reply ok 2025-09-25T16:19:16.586378Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:40312) <- (200 , 1672 bytes) 2025-09-25T16:19:16.586424Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:40312) connection closed 2025-09-25T16:19:16.586492Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037907] server disconnected, pipe [8:7554062165142625628:2521] destroyed 2025-09-25T16:19:16.586500Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037908] server disconnected, pipe [8:7554062165142625629:2522] destroyed 2025-09-25T16:19:16.586505Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037910] server disconnected, pipe [8:7554062165142625631:2524] destroyed 2025-09-25T16:19:16.586509Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037911] server disconnected, pipe [8:7554062165142625632:2525] destroyed 2025-09-25T16:19:16.586513Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037909] server disconnected, pipe [8:7554062165142625630:2523] destroyed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1758817157,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-09-25T16:19:16.666806Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037908][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.666823Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037908][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.666826Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037908][Partition][2][StateIdle] Try persist 2025-09-25T16:19:16.667250Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037909][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.667253Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037909][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.667256Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037909][Partition][3][StateIdle] Try persist 2025-09-25T16:19:16.667265Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037910][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.667268Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037910][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.667269Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037910][Partition][4][StateIdle] Try persist 2025-09-25T16:19:16.667768Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037907][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.667783Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037907][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.667785Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: 
[72075186224037907][Partition][1][StateIdle] Try persist 2025-09-25T16:19:16.668265Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037911][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.668278Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037911][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.668280Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037911][Partition][0][StateIdle] Try persist 2025-09-25T16:19:16.767178Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037908][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.767197Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037908][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.767200Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037908][Partition][2][StateIdle] Try persist 2025-09-25T16:19:16.767629Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037909][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.767640Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037909][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.767643Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037909][Partition][3][StateIdle] Try persist 2025-09-25T16:19:16.767692Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037910][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.767714Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037910][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.767718Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037910][Partition][4][StateIdle] Try persist 2025-09-25T16:19:16.768127Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037907][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.768138Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037907][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.768140Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037907][Partition][1][StateIdle] Try persist 2025-09-25T16:19:16.768649Z node 8 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037911][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:16.768658Z node 8 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037911][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:16.768661Z node 8 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037911][Partition][0][StateIdle] Try persist |81.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDelete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:08.389148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:08.389177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:08.389182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:08.389188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:08.389195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:08.389199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:08.389208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:08.389221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:08.389354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:08.389426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:08.418774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:08.418816Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:08.418923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:08.424022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:08.424142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:08.424179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-09-25T16:19:08.425729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:08.425812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:08.425940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:08.426176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:08.427237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:08.427281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:08.427539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:08.427550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:08.427570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:08.427577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:08.427584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:08.427641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:08.429198Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:08.459275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:08.459375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.459440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:08.459449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 
72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:08.459509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:08.459524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:08.460503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:08.460549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:08.460622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.460632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:08.460638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:08.460644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:08.461603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.461620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:08.461627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:08.462099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.462110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:08.462117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:08.462125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:08.462969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:08.463565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:08.463626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:08.463886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:08.463918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-09-25T16:19:25.695143Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-09-25T16:19:25.695146Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-09-25T16:19:25.695149Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-09-25T16:19:25.695152Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1002:0 2025-09-25T16:19:25.695155Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1002:0 2025-09-25T16:19:25.695175Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-09-25T16:19:25.695183Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2025-09-25T16:19:25.695186Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-09-25T16:19:25.695188Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-09-25T16:19:25.695284Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.695292Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.695295Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-09-25T16:19:25.695299Z node 61 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-09-25T16:19:25.695302Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:19:25.695368Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.695375Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.695378Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-09-25T16:19:25.695380Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-09-25T16:19:25.695383Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:19:25.695389Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2025-09-25T16:19:25.695392Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [61:434:2401] 2025-09-25T16:19:25.695652Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:19:25.695666Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:19:25.695671Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:19:25.695869Z node 61 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-09-25T16:19:25.695923Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:25.695966Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:25.696022Z node 61 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 
72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2025-09-25T16:19:25.696246Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-09-25T16:19:25.696269Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2025-09-25T16:19:25.696445Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:19:25.696453Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:19:25.696472Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:19:25.696585Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.696624Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:19:25.696627Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:19:25.696634Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:19:25.696709Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-09-25T16:19:25.696720Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-09-25T16:19:25.696725Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [61:441:2408] 2025-09-25T16:19:25.696974Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-09-25T16:19:25.696984Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-09-25T16:19:25.697253Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-09-25T16:19:25.697260Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-09-25T16:19:25.697270Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2025-09-25T16:19:25.697286Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-09-25T16:19:25.697346Z node 61 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-09-25T16:19:25.697358Z node 61 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2025-09-25T16:19:25.697410Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:25.697440Z node 61 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 36us result status StatusPathDoesNotExist 2025-09-25T16:19:25.697468Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnFamilyWithCacheModeError [GOOD] Test command err: Trying to start YDB, gRPC: 3757, MsgBus: 26083 2025-09-25T16:19:02.483928Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062107601229529:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:02.484740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/endf/001c79/r3tmp/tmpj4oD8K/pdisk_1.dat 2025-09-25T16:19:02.542208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:02.542238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:02.542980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:02.544892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:02.559356Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3757, node 1 2025-09-25T16:19:02.575438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:02.575453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:02.575455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:02.575500Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26083 TClient is connected to server localhost:26083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:02.697769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:19:02.701298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:02.730124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:03.038759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062111896197442:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.038804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.039213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062111896197454:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.039236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062111896197455:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.039305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:03.040237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:03.043238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:19:03.043355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062111896197458:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:19:03.122261Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062111896197509:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:03.294776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:03.489012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:03.530153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.585161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-09-25T16:19:03.613727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710762:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:03.640462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710765:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) Trying to start YDB, gRPC: 29585, MsgBus: 26425 2025-09-25T16:19:04.154800Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:19:04.154943Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062116805844686:2215];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:04.155087Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c79/r3tmp/tmpDZE3ES/pdisk_1.dat 2025-09-25T16:19:04.203262Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:04.203524Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7554062116805844506:2081] 1758817144139774 != 1758817144139777 2025-09-25T16:19:04.209163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:04.209190Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:04.214515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29585, node 2 2025-09-25T16:19:04.228579Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:04.228590Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:04.228592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:04.228642Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26425 2025-09-25T16:19:04.261529Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Nam ... 
itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-09-25T16:19:13.439074Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:19:13.439113Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:19:13.439199Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:19:13.439227Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:19:13.439254Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:19:13.439281Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:19:13.439309Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:19:13.439348Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:19:13.439374Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:19:13.439402Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:19:13.439428Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:19:13.439456Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:19:13.439480Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:19:13.441335Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:19:13.441355Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:19:13.441371Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:19:13.441381Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:19:13.441406Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:19:13.441412Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:19:13.441428Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:19:13.441435Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:19:13.441444Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:19:13.441450Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:19:13.441458Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:19:13.441464Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:19:13.441493Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:19:13.441502Z node 8 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:19:13.441526Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:19:13.441534Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:19:13.441543Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:19:13.441550Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:19:13.441559Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:19:13.441567Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:19:13.441584Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:19:13.441591Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-09-25T16:19:13.441604Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-09-25T16:19:13.441610Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-09-25T16:19:13.443285Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[8:7554062153920839262:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=21065607707056;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1758817153443;max=18446744073709551615;plan=0;src=[8:7554062149625871587:2145];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:13.445686Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:13.445714Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:13.445718Z node 8 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-09-25T16:19:13.452904Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062153920839330:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.452943Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.453146Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062153920839333:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.453157Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-09-25T16:18:49.870029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062048976708169:2075];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:49.870057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005987/r3tmp/tmpPS4Fup/pdisk_1.dat 2025-09-25T16:18:49.944868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1736, node 1 2025-09-25T16:18:49.970717Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.972226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:49.972268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:49.973019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:49.973023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:49.973024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:49.973095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:49.976946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
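Editorial aside (not part of the captured log): the recurring KQP_WORKLOAD_SERVICE warnings above — "Resource pool default not found or you don't have access permissions", followed by an ESchemeOpCreateResourcePool operation, a schemereq error "path exist, request accepts it" and a "completed, doublechecking" retry — read like a create-if-missing race during database bootstrap rather than a test failure. The sketch below is purely illustrative of that pattern; the FakeSchemeClient and its methods are invented for the demo and are not YDB APIs.

```python
# Illustrative only: a generic "fetch, create if missing, tolerate the race,
# re-fetch" flow that would produce the warning sequence seen above.
import time

class FakeSchemeClient:
    """Invented stand-in for a scheme client; not a YDB API."""
    def __init__(self):
        self.paths = set()

    def fetch_pool(self, path):
        return path if path in self.paths else None      # None == NOT_FOUND

    def create_pool(self, path):
        if path in self.paths:
            return "ALREADY_EXISTS"   # "path exist, request accepts it"
        self.paths.add(path)
        return "SUCCESS"

def ensure_default_pool(client, path, retries=(0.05, 0.1, 0.2)):
    pool = client.fetch_pool(path)
    if pool is not None:
        return pool
    # First fetch raced ahead of pool creation: NOT_FOUND, so try to create.
    client.create_pool(path)          # SUCCESS or ALREADY_EXISTS are both fine
    # "Doublechecking": the creating transaction may still be publishing,
    # so re-fetch with a short backoff instead of failing the query.
    for delay in retries:
        pool = client.fetch_pool(path)
        if pool is not None:
            return pool
        time.sleep(delay)
    raise RuntimeError("default resource pool still not visible")

client = FakeSchemeClient()
print(ensure_default_pool(client, "/Root/.metadata/workload_manager/pools/default"))
```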
2025-09-25T16:18:50.004649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:50.008730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5987 2025-09-25T16:18:50.033713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:50.035488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:50.036279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-09-25T16:18:50.089627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.097817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:50.109627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.163179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... waiting... 
2025-09-25T16:18:50.195131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:50.212908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.229236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.240296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.259102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.270168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.282751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.321305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053271676816:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.321332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053271676827:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.321341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.321544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053271676831:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.321553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.322288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.325324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062053271676830:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.403772Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062053271676883:2865] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeR ... StateIdle] Try persist 2025-09-25T16:19:17.859981Z node 8 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72075186224037907][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:17.860002Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "teststream" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/teststream" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 1 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2025-09-25T16:19:17.860020Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2025-09-25T16:19:17.860044Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:3567: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-09-25T16:19:17.860199Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1040: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-09-25T16:19:17.860218Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:3567: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-09-25T16:19:17.860473Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1040: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-09-25T16:19:17.860538Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [CreateStream] requestId [d7a17c0c-2e010d25-bc4955cc-7e59dafc] reply ok 2025-09-25T16:19:17.860574Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:41304) <- (200 , 2 bytes) Http output full {} 2025-09-25T16:19:17.860650Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:41304) connection closed 200 {} 2025-09-25T16:19:17.860973Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:41316) incoming connection opened 2025-09-25T16:19:17.860993Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:41316) -> (POST /Root, 157 bytes) 2025-09-25T16:19:17.861023Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [181b:4ffc:2f50:0:1b:4ffc:2f50:0] request [ListShards] url [/Root] database [/Root] requestId: 6d396cc5-7ff903a6-c6ce6e0b-8b71d551 2025-09-25T16:19:17.861112Z node 8 :HTTP_PROXY INFO: http_req.cpp:958: http request [ListShards] requestId [6d396cc5-7ff903a6-c6ce6e0b-8b71d551] got new request from [181b:4ffc:2f50:0:1b:4ffc:2f50:0] database '/Root' stream 'teststream' 2025-09-25T16:19:17.861222Z node 8 :HTTP_PROXY DEBUG: http_req.cpp:1498: http request [ListShards] requestId [6d396cc5-7ff903a6-c6ce6e0b-8b71d551] [auth] Authorized successfully 2025-09-25T16:19:17.861242Z node 8 :HTTP_PROXY INFO: http_req.cpp:677: http request [ListShards] requestId [6d396cc5-7ff903a6-c6ce6e0b-8b71d551] sending grpc request to '' 
database: '/Root' iam token size: 0 E0000 00:00:1758817157.861267 280205 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-09-25T16:19:17.861565Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037907] server connected, pipe [8:7554062172787444904:2510], now have 1 active actors on pipe 2025-09-25T16:19:17.861567Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037911] server connected, pipe [8:7554062172787444905:2511], now have 1 active actors on pipe 2025-09-25T16:19:17.861656Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037907] server disconnected, pipe [8:7554062172787444904:2510] destroyed 2025-09-25T16:19:17.861662Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037911] server disconnected, pipe [8:7554062172787444905:2511] destroyed 2025-09-25T16:19:17.861674Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [ListShards] requestId [6d396cc5-7ff903a6-c6ce6e0b-8b71d551] reply ok 2025-09-25T16:19:17.861699Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:41316) <- (200 , 449 bytes) Http output full {"NextToken":"COWFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-09-25T16:19:17.861730Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:41316) connection closed 200 {"NextToken":"COWFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-09-25T16:19:17.861925Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:41332) incoming connection opened 2025-09-25T16:19:17.861938Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:41332) -> (POST /Root, 157 bytes) 2025-09-25T16:19:17.861951Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d8b9:34fa:2f50:0:c0b9:34fa:2f50:0] request [ListShards] url [/Root] database [/Root] requestId: df167a4c-7266342b-acf641d5-6fba156a 2025-09-25T16:19:17.861995Z node 8 :HTTP_PROXY INFO: http_req.cpp:958: http request [ListShards] requestId [df167a4c-7266342b-acf641d5-6fba156a] got new request from [d8b9:34fa:2f50:0:c0b9:34fa:2f50:0] database '/Root' stream 'teststream' 2025-09-25T16:19:17.862066Z node 8 :HTTP_PROXY DEBUG: http_req.cpp:1498: http request [ListShards] requestId [df167a4c-7266342b-acf641d5-6fba156a] [auth] Authorized successfully 2025-09-25T16:19:17.862076Z node 8 :HTTP_PROXY INFO: http_req.cpp:677: http request [ListShards] requestId [df167a4c-7266342b-acf641d5-6fba156a] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1758817157.862087 280205 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 
2025-09-25T16:19:17.862194Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037907] server connected, pipe [8:7554062172787444916:2515], now have 1 active actors on pipe 2025-09-25T16:19:17.862199Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037911] server connected, pipe [8:7554062172787444917:2516], now have 1 active actors on pipe 2025-09-25T16:19:17.862261Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037907] server disconnected, pipe [8:7554062172787444916:2515] destroyed 2025-09-25T16:19:17.862263Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [ListShards] requestId [df167a4c-7266342b-acf641d5-6fba156a] reply ok 2025-09-25T16:19:17.862268Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037911] server disconnected, pipe [8:7554062172787444917:2516] destroyed 2025-09-25T16:19:17.862284Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:41332) <- (200 , 449 bytes) Http output full {"NextToken":"COaFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]}2025-09-25T16:19:17.862308Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:41332) connection closed 200 {"NextToken":"COaFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-09-25T16:19:17.862467Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:41334) incoming connection opened 2025-09-25T16:19:17.862485Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:41334) -> (POST /Root, 157 bytes) 2025-09-25T16:19:17.862507Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [9848:55fc:2f50:0:8048:55fc:2f50:0] request [ListShards] url [/Root] database [/Root] requestId: 503d33e7-3a3223b4-8be6a139-99e5adf1 2025-09-25T16:19:17.862580Z node 8 :HTTP_PROXY INFO: http_req.cpp:958: http request [ListShards] requestId [503d33e7-3a3223b4-8be6a139-99e5adf1] got new request from [9848:55fc:2f50:0:8048:55fc:2f50:0] database '/Root' stream 'teststream' 2025-09-25T16:19:17.862639Z node 8 :HTTP_PROXY DEBUG: http_req.cpp:1498: http request [ListShards] requestId [503d33e7-3a3223b4-8be6a139-99e5adf1] [auth] Authorized successfully 2025-09-25T16:19:17.862648Z node 8 :HTTP_PROXY INFO: http_req.cpp:677: http request [ListShards] requestId [503d33e7-3a3223b4-8be6a139-99e5adf1] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1758817157.862660 280205 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-09-25T16:19:17.862756Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037907] server connected, pipe [8:7554062172787444928:2520], now have 1 active actors on pipe 2025-09-25T16:19:17.862764Z node 8 :PERSQUEUE DEBUG: 
pq_impl.cpp:2718: [PQ: 72075186224037911] server connected, pipe [8:7554062172787444929:2521], now have 1 active actors on pipe 2025-09-25T16:19:17.862817Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037907] server disconnected, pipe [8:7554062172787444928:2520] destroyed 2025-09-25T16:19:17.862818Z node 8 :HTTP_PROXY INFO: http_req.cpp:1205: http request [ListShards] requestId [503d33e7-3a3223b4-8be6a139-99e5adf1] reply ok 2025-09-25T16:19:17.862835Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037911] server disconnected, pipe [8:7554062172787444929:2521] destroyed 2025-09-25T16:19:17.862837Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:41334) <- (200 , 449 bytes) 2025-09-25T16:19:17.862860Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:41334) connection closed Http output full {"NextToken":"COaFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"COaFsI2YMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithDefaultFamily-UseQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6657, MsgBus: 6492 2025-09-25T16:19:03.700850Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062112513115461:2136];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:03.700869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001c29/r3tmp/tmpRDKII5/pdisk_1.dat 2025-09-25T16:19:03.784554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:03.800556Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6657, node 1 2025-09-25T16:19:03.814125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:03.814156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:03.815119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:03.817969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
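Editorial aside on the TestKinesisHttpProxy::ListShardsToken output above: the E0000 lines report that an empty NextToken cannot be parsed into NKikimrPQ.TYdsNextToken because its required fields (CreationTimestamp, MaxResults, AlreadyRead, StreamArn) are missing, while each 200 response returns a populated, base64-encoded NextToken. The sketch below, which is not part of the test, decodes the token from the first response with a generic protobuf wire-format reader; beyond the timestamp and the stream name, the mapping of field numbers to field names is only an assumption.

```python
import base64

def read_varint(buf, pos):
    """Decode a protobuf varint starting at pos; return (value, new_pos)."""
    value, shift = 0, 0
    while True:
        b = buf[pos]
        value |= (b & 0x7F) << shift
        pos += 1
        if not b & 0x80:
            return value, pos
        shift += 7

# NextToken from the first 200 response in the log above.
token = base64.b64decode("COWFsI2YMxACGAIiCnRlc3RzdHJlYW0=")
pos, fields = 0, {}
while pos < len(token):
    tag, pos = read_varint(token, pos)
    field_no, wire_type = tag >> 3, tag & 0x07
    if wire_type == 0:                      # varint
        fields[field_no], pos = read_varint(token, pos)
    elif wire_type == 2:                    # length-delimited
        length, pos = read_varint(token, pos)
        fields[field_no] = token[pos:pos + length]
        pos += length
    else:                                   # other wire types do not occur here
        break

print(fields)
# -> {1: 1758817157861, 2: 2, 3: 2, 4: b'teststream'}
# Field 1 matches the request timestamp in milliseconds (…T16:19:17.861Z) and
# field 4 the stream name; fields 2 and 3 plausibly correspond to MaxResults
# and AlreadyRead, but that ordering is a guess based on the error message.

# The HashKeyRange boundaries in the same response are consistent with the
# 128-bit hash key space split evenly across the stream's 5 partitions
# (TotalPartitions: 5 in the applied config): the first shard ends at
# floor(2**128 / 5) - 1.
assert (2**128) // 5 - 1 == 68056473384187692692674921486353642290
```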
2025-09-25T16:19:03.817979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:03.817981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:03.818026Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6492 TClient is connected to server localhost:6492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:03.912804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:03.921094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:19:03.941839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:03.970494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:03.990151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:04.010151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:04.050051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:04.277316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062116808084298:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.277344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.277518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062116808084308:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.277526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.367681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.380386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.392100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.404333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.416552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.431484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.446914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.475573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:04.528922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062116808085164:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.528999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.529213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062116808085174:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.529223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062116808085175:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.529241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:04.530205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_ ... Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:15.625450Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:15.645294Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.656585Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:15.656624Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:15.657754Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:15.703923Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.729954Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:15.730854Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:15.745741Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.952638Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062161816344059:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.952671Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.952986Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062161816344078:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.953006Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:15.956250Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:15.963707Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:15.971949Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:15.985348Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:15.999730Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.013737Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.030331Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.042039Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.060125Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062166111312228:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.060156Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062166111312233:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.060169Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.060224Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062166111312236:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.060240Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.061411Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:16.069408Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062166111312235:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:16.157809Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062166111312289:3552] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:16.397279Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.412928Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-09-25T16:19:25.436922Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:25.438062Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:25.438133Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:25.438145Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.438154Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:25.438351Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:262:2256], now have 1 active actors on pipe 2025-09-25T16:19:25.438373Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:25.440625Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:25.440661Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.440894Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:25.440938Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:25.441017Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:25.441064Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:270:2262] 2025-09-25T16:19:25.441496Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:25.441502Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:25.441517Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2262] 2025-09-25T16:19:25.441523Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:25.441530Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:25.441533Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:25.441545Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:25.441548Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:25.441604Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:25.441695Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:273:2264], now have 1 active actors on pipe 2025-09-25T16:19:25.453281Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:25.454259Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:25.454338Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-09-25T16:19:25.454352Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.454363Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928138] doesn't have tx writes info 2025-09-25T16:19:25.454514Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [2:403:2363], now have 1 active actors on pipe 2025-09-25T16:19:25.454542Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:25.455069Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:25.455100Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.455338Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928138] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:25.455372Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:25.455440Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:25.455475Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:411:2369] 2025-09-25T16:19:25.456053Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:25.456067Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:25.456074Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:411:2369] 2025-09-25T16:19:25.456084Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:25.456095Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. Count 0 2025-09-25T16:19:25.456101Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:25.456106Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:25.456111Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928138][Partition][1][StateIdle] Try persist 2025-09-25T16:19:25.456191Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928138][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:25.456298Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [2:414:2371], now have 1 active actors on pipe 2025-09-25T16:19:25.459940Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:25.460946Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:25.461021Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:25.461034Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.461045Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:25.461197Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:463:2408], now have 1 active actors on pipe 2025-09-25T16:19:25.461223Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:25.461737Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:25.461764Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:25.461922Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 3 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 
Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:25.461949Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:25.462017Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:25.462049Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:471:2414] 2025-09-25T16:19:25.462564Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:25.462577Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:25.462585Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:471:2414] 2025-09-25T16:19:25.462594Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:25.462604Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:25.462609Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:25.462615Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:25.462621Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:25.462699Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:25.462811Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:474:2416], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-09-25T16:19:25.464599Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:481:2419], now have 1 active actors on pipe 2025-09-25T16:19:25.464732Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [2:484:2420], now have 1 active actors on pipe 2025-09-25T16:19:25.464787Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:485:2420], now have 1 active actors on pipe 2025-09-25T16:19:25.464946Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [2:481:2419] destroyed 2025-09-25T16:19:25.465051Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [2:484:2420] destroyed 2025-09-25T16:19:25.465067Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928139] server disconnected, pipe [2:485:2420] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } 
ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-09-25T16:18:50.369341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062053928411081:2250];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.369389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005951/r3tmp/tmpjH8AZH/pdisk_1.dat 2025-09-25T16:18:50.451608Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:50.451734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:50.451857Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062053928410860:2081] 1758817130365994 != 1758817130365997 TServer::EnableGrpc on GrpcPort 27766, node 1 2025-09-25T16:18:50.459168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:50.459185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:50.459187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:50.459231Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:50.475860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:50.475886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:50.477131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:50.493060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:50.494808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:27833 2025-09-25T16:18:50.521155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:50.522665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:50.523309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... waiting... 2025-09-25T16:18:50.553762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:50.583023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:50.624568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-09-25T16:18:50.639619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.660445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.674251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.687784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.702692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.721691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.730762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:50.745131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:50.793832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053928412270:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.793834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053928412259:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.793859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.793938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053928412274:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.793949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.794843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.797475Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062053928412273:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.857110Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062053928412326:2865] txid# 281474976 ... e: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.844660Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 3ms 2025-09-25T16:19:13.844740Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.844752Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.844755Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": 
[], "truncated": false} 2025-09-25T16:19:13.844756Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 2ms 2025-09-25T16:19:13.844776Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 3ms 2025-09-25T16:19:13.844798Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.844811Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-09-25T16:19:13.844838Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 2ms 2025-09-25T16:19:13.844889Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.844924Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-09-25T16:19:13.874076Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7554062152434785445:2428]: Pool not found 2025-09-25T16:19:13.874138Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-09-25T16:19:13.910634Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7554062152434785443:2427]: Pool not found 2025-09-25T16:19:13.910722Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-09-25T16:19:13.911525Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7554062152434785555:2446], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-09-25T16:19:13.911536Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062152434785554:2445], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.911559Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.911611Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7554062152434785558:2447], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.911618Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:13.946849Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7554062152434785552:2444]: Pool not found 2025-09-25T16:19:13.947103Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-09-25T16:19:14.834922Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#37,[::1]:56414) incoming connection opened 2025-09-25T16:19:14.834967Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#37,[::1]:56414) -> (POST /Root, 3 bytes) 2025-09-25T16:19:14.835030Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [182a:9fb8:3651:0:2a:9fb8:3651:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 49900a93-e58280e9-9cc5f01-600b54a3 2025-09-25T16:19:14.835121Z node 7 :HTTP_PROXY INFO: http_req.cpp:1209: http request [UnknownMethodName] requestId [49900a93-e58280e9-9cc5f01-600b54a3] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-09-25T16:19:14.835163Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:56414) <- (400 InvalidAction, 76 bytes) 2025-09-25T16:19:14.835182Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:289: (#37,[::1]:56414) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2025-09-25T16:19:14.835195Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:296: (#37,[::1]:56414) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 49900a93-e58280e9-9cc5f01-600b54a3 Content-Type: application/x-amz-json-1.1 Content-Length: 76 Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-09-25T16:19:14.835249Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:56414) connection closed 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-09-25T16:18:50.764679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062054530154551:2140];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.764733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00594e/r3tmp/tmpp51wj3/pdisk_1.dat 2025-09-25T16:18:50.821970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:50.840224Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13637, node 1 2025-09-25T16:18:50.849073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:50.849091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-09-25T16:18:50.849098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:50.849146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:18:50.869939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:50.869972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:50.871113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:50.893462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:17730 2025-09-25T16:18:50.946065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:50.949334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... waiting... 
2025-09-25T16:18:50.973228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:50.991569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:51.001867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:51.024489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-09-25T16:18:51.054396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:51.081107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:18:51.106049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:51.115284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.130801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:51.143628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:51.159870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.171417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.186659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.213912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058825123144:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.213923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058825123136:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.213939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.214020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058825123151:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.214036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.214886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:51.218010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062058825123150:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:51.305955Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062058825123203:2865] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTy ... 0\203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\003\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> TPQCachingProxyTest::MultipleSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2025-09-25T16:18:24.642048Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061943606817476:2147];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:24.642086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004184/r3tmp/tmp9UJs5J/pdisk_1.dat 2025-09-25T16:18:24.735978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:24.782465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:24.782493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:24.783974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20324, node 1 2025-09-25T16:18:24.796315Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:24.852893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:24.852907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:24.852909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:24.852966Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:24.901673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:4225 2025-09-25T16:18:24.953897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-09-25T16:18:24.954009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_store.cpp:332: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:24.954170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-09-25T16:18:24.954186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-09-25T16:18:24.954190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976715658:0 type: TxCreateOlapStore target path: [OwnerId: 72057594046644480, LocalPathId: 2] source path: 2025-09-25T16:18:24.954199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 
72057594046644480, LocalPathId: 2] was 1 2025-09-25T16:18:24.954206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-09-25T16:18:24.954214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-09-25T16:18:24.954219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-09-25T16:18:24.954292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-09-25T16:18:24.954796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715658:0 1 -> 2 2025-09-25T16:18:24.954865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:18:24.954877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-09-25T16:18:24.954907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-09-25T16:18:24.954917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-09-25T16:18:24.957073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-09-25T16:18:24.957127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-09-25T16:18:24.957188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-09-25T16:18:24.957196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-09-25T16:18:24.957232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-09-25T16:18:24.957256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-09-25T16:18:24.957262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554061943606817962:2377], at schemeshard: 
72057594046644480, txId: 281474976715658, path id: 1 2025-09-25T16:18:24.957265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554061943606817962:2377], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-09-25T16:18:24.957273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-09-25T16:18:24.957285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 waiting... 2025-09-25T16:18:24.957439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StorageP ... delivery in input channelId: 35, seqNo: [1] 2025-09-25T16:19:22.855716Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 36, seqNo: [1] 2025-09-25T16:19:22.855718Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 37, seqNo: [1] 2025-09-25T16:19:22.855721Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2025-09-25T16:19:22.855723Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2025-09-25T16:19:22.855726Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2025-09-25T16:19:22.855728Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2025-09-25T16:19:22.855730Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2025-09-25T16:19:22.855733Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2025-09-25T16:19:22.855736Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2025-09-25T16:19:22.855738Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2025-09-25T16:19:22.855741Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2025-09-25T16:19:22.855744Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2025-09-25T16:19:22.855746Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2025-09-25T16:19:22.855748Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2025-09-25T16:19:22.855751Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2025-09-25T16:19:22.855753Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2025-09-25T16:19:22.855756Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2025-09-25T16:19:22.855758Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2025-09-25T16:19:22.855760Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2025-09-25T16:19:22.855763Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2025-09-25T16:19:22.855765Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2025-09-25T16:19:22.855767Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2025-09-25T16:19:22.855769Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2025-09-25T16:19:22.855771Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2025-09-25T16:19:22.855773Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2025-09-25T16:19:22.855775Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2025-09-25T16:19:22.855777Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2025-09-25T16:19:22.855779Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2025-09-25T16:19:22.855781Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-09-25T16:19:22.855782Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710670, task: 65. Tasks execution finished 2025-09-25T16:19:22.855785Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [28:7554062192611550830:3129], TxId: 281474976710670, task: 65. Ctx: { TraceId : 01k60tr5hb9w5z6sdhrs2y23z6. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-09-25T16:19:22.855822Z node 28 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710670, task: 65. pass away 2025-09-25T16:19:22.855856Z node 28 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-09-25T16:19:22.855858Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:470: ActorId: [28:7554062192611550746:3059] TxId: 281474976710670. Ctx: { TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Database: /Root, SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7554062192611550830:3129], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1009 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 89 FinishTimeMs: 1758817162855 ComputeCpuTimeUs: 42 BuildCpuTimeUs: 47 HostName: "ghrun-v6cxduzo2m" NodeId: 28 CreateTimeMs: 1758817162844 UpdateTimeMs: 1758817162855 } MaxMemoryUsage: 1048576 } 2025-09-25T16:19:22.855868Z node 28 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710670. Ctx: { TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Database: /Root, SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, PoolId: default}. Compute actor has finished execution: [28:7554062192611550830:3129] 2025-09-25T16:19:22.855902Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1208: ActorId: [28:7554062192611550746:3059] TxId: 281474976710670. Ctx: { TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Database: /Root, SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, PoolId: default}. terminate execution. 2025-09-25T16:19:22.855910Z node 28 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:900: ActorId: [28:7554062192611550746:3059] TxId: 281474976710670. Ctx: { TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Database: /Root, SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.012769s ReadRows: 0 ReadBytes: 0 ru: 8 rate limiter was not found force flag: 1 2025-09-25T16:19:22.855925Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1938: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-09-25T16:19:22.855998Z node 28 :KQP_SESSION INFO: kqp_session_actor.cpp:2202: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 20.08 QueriesCount: 1 2025-09-25T16:19:22.856010Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2361: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-09-25T16:19:22.856142Z node 28 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:22.856144Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: 
ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, EndCleanup, isFinal: 1 2025-09-25T16:19:22.856153Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2518: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: ExecuteState, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7554062188316580283:2266] 2025-09-25T16:19:22.856155Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2798: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: unknown state, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Cleanup temp tables: 0 2025-09-25T16:19:22.857583Z node 28 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817162844, txId: 18446744073709551615] shutting down 2025-09-25T16:19:22.857639Z node 28 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2889: SessionId: ydb://session/3?node_id=28&id=NjZkY2NhYmMtNGQzY2RhMTktZmUyOTdkMDgtNmYyMjhmNzY=, ActorId: [28:7554062192611550727:3059], ActorState: unknown state, TraceId: 01k60tr5hb9w5z6sdhrs2y23z6, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-09-25T16:18:50.226935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062053072942820:2254];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.226963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005980/r3tmp/tmpKnX2ep/pdisk_1.dat 2025-09-25T16:18:50.326116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:50.328061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:50.328089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:50.329085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19908, node 1 2025-09-25T16:18:50.351721Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:50.356923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062053072942579:2081] 1758817130221739 != 1758817130221742 2025-09-25T16:18:50.357155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:50.357166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:50.357169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:50.357222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration TClient is connected to server localhost:27079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:18:50.390298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:50.392793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:27079 2025-09-25T16:18:50.419942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:50.426863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:50.427908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-09-25T16:18:50.494348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:50.534007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.548814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.594132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.605174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.618795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.624659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:50.637645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:50.650228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:50.661294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.674943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.763642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053072943981:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.763674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.763976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053072943994:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.764001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062053072943993:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.764010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.765010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.767216Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062053072943997:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.845926Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062053072944048:2865] txid# 281474976 ... idx=CHANGE_VISIBILITY_ID). Mode: COMPILE_AND_EXEC 2025-09-25T16:19:19.824036Z node 7 :SQS TRACE: executor.cpp:154: Request [6c87687f-acba2ad9-98360c0c-49635d10] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 2, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 18011340738530590538, "NOW": 1758817159823, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1758817159781, "Offset": 1, "NewVisibilityDeadline": 1758817160823}, {"LockTimestamp": 1758817159796, "Offset": 2, "NewVisibilityDeadline": 1758817161823}]} 2025-09-25T16:19:19.824137Z node 7 :SQS TRACE: executor.cpp:203: Request [6c87687f-acba2ad9-98360c0c-49635d10] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> KqpScheme::AlterAsyncReplication [GOOD] Test command err: Trying to start YDB, gRPC: 11873, MsgBus: 8927 2025-09-25T16:19:04.389349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001bd2/r3tmp/tmpR5xL7g/pdisk_1.dat 2025-09-25T16:19:04.517109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:04.517146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:04.520279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:04.520766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:04.530556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:19:04.530880Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:04.536952Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062115676510447:2081] 1758817144370022 != 1758817144370025 TServer::EnableGrpc on GrpcPort 11873, node 1 2025-09-25T16:19:04.579739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:04.579749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:04.579751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:04.579791Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8927 2025-09-25T16:19:04.801664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:04.855907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:04.876961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:04.893716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:04.951552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:04.994187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.013893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:05.101070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062119971479389:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.101106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.105092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062119971479399:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.105121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.285509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.304677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.329692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.347401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.361615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.366699Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:05.377027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.440094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.453920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:05.487294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062119971480265:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.487333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.487501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062119971480270:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:05.487510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062119971480271:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don ... 2025-09-25T16:19:28.398764Z node 10 :REPLICATION_CONTROLLER TRACE: secret_resolver.cpp:27: [SecretResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:19:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:19:28.399015Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:248: [controller 72075186224037928] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:mysecret } 2025-09-25T16:19:28.399024Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:21: [controller 72075186224037928][TxResolveSecretResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:mysecret } 2025-09-25T16:19:28.399027Z node 10 :REPLICATION_CONTROLLER ERROR: tx_resolve_secret_result.cpp:45: [controller 72075186224037928][TxResolveSecretResult] Resolve secret error: rid# 1, error# No such secret: USId:root@builtin:mysecret 2025-09-25T16:19:28.399097Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:59: [controller 72075186224037928][TxResolveSecretResult] Complete 2025-09-25T16:19:28.406006Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554062219944257753:4225] txid# 281474976710688, issues: { message: "User is not set" severity: 1 } 2025-09-25T16:19:28.409743Z node 10 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [10:7554062219944257762:4230] txid# 281474976710689, issues: { message: "User is not set" severity: 1 } 2025-09-25T16:19:28.412513Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterReplication, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:28.412875Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037928] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710690 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.412921Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037928][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710690 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 
2025-09-25T16:19:28.413016Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037928][TxAlterReplication] Complete 2025-09-25T16:19:28.413125Z node 10 :REPLICATION_CONTROLLER TRACE: secret_resolver.cpp:27: [SecretResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:19:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:19:28.413218Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:248: [controller 72075186224037928] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin: } 2025-09-25T16:19:28.413238Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:21: [controller 72075186224037928][TxResolveSecretResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin: } 2025-09-25T16:19:28.413247Z node 10 :REPLICATION_CONTROLLER ERROR: tx_resolve_secret_result.cpp:45: [controller 72075186224037928][TxResolveSecretResult] Resolve secret error: rid# 1, error# No such secret: USId:root@builtin: 2025-09-25T16:19:28.413324Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:59: [controller 72075186224037928][TxResolveSecretResult] Complete 2025-09-25T16:19:28.418814Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterReplication, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:28.419122Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037928] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710691 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.419165Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037928][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710691 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.419256Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037928][TxAlterReplication] Complete 2025-09-25T16:19:28.426752Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterReplication, opId: 281474976710692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:28.427073Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037928] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710692 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.427104Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037928][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710692 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.427205Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037928][TxAlterReplication] Complete 2025-09-25T16:19:28.427270Z node 10 :REPLICATION_CONTROLLER TRACE: secret_resolver.cpp:27: [SecretResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:19:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:19:28.427360Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:248: [controller 72075186224037928] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-09-25T16:19:28.427371Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:21: [controller 72075186224037928][TxResolveSecretResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-09-25T16:19:28.427379Z node 10 :REPLICATION_CONTROLLER ERROR: tx_resolve_secret_result.cpp:45: [controller 72075186224037928][TxResolveSecretResult] Resolve secret error: rid# 1, error# No such secret: USId:root@builtin:password_secret_name 2025-09-25T16:19:28.427414Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:59: [controller 72075186224037928][TxResolveSecretResult] Complete 2025-09-25T16:19:28.433713Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterReplication, opId: 281474976710693:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:28.434046Z node 10 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037928] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710693 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.434076Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037928][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 19 } OperationId { TxId: 281474976710693 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Row { } } } 2025-09-25T16:19:28.434154Z node 10 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037928][TxAlterReplication] Complete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-09-25T16:19:10.518444Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.519604Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.519706Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:10.519722Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.519735Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:10.519969Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [1:261:2256], now have 1 active actors on pipe 2025-09-25T16:19:10.519992Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:10.524691Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.524766Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.525139Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [1:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.525206Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.525402Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.525484Z node 1 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:269:2262] 2025-09-25T16:19:10.526114Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.526127Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:10.526148Z node 1 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:269:2262] 2025-09-25T16:19:10.526157Z node 1 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.526169Z node 1 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:10.526175Z node 1 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:10.526190Z node 1 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:10.526194Z node 1 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:10.526293Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:10.526446Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [1:272:2264], now have 1 active actors on pipe 2025-09-25T16:19:10.538610Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.539517Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.539607Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-09-25T16:19:10.539621Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.539632Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928137] doesn't have tx writes info 2025-09-25T16:19:10.539828Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [1:402:2363], now have 1 active actors on pipe 2025-09-25T16:19:10.539842Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:10.540369Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.540401Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.540617Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928137] Config applied version 2 actor [1:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:10.540641Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.540714Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.540745Z node 1 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:410:2369] 2025-09-25T16:19:10.541332Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.541348Z 
node 1 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-09-25T16:19:10.541355Z node 1 :PERSQUEUE INFO: partition.cpp:694: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:410:2369] 2025-09-25T16:19:10.541363Z node 1 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.541375Z node 1 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-09-25T16:19:10.541380Z node 1 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928137][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:10.541385Z node 1 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928137][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:10.541390Z node 1 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928137][Partition][0][StateIdle] Try persist 2025-09-25T16:19:10.541488Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928137][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:10.541631Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [1:413:2371], now have 1 active actors on pipe 2025-09-25T16:19:10.545995Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:10.547247Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:10.547345Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-09-25T16:19:10.547360Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.547370Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928138] doesn't have tx writes info 2025-09-25T16:19:10.547559Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [1:462:2408], now have 1 active actors on pipe 2025-09-25T16:19:10.547570Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:10.548060Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:10.548090Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:10.548279Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928138] Config applied version 3 actor [1:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 
Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:10.548307Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:10.548380Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:10.548414Z node 1 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [1:470:2414] 2025-09-25T16:19:10.548965Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:10.548979Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:10.548987Z node 1 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:470:2414] 2025-09-25T16:19:10.548995Z node 1 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:10.549009Z node 1 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. Count 0 2025-09-25T16:19: ... 2] 2025-09-25T16:19:11.722653Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:11.722664Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:11.722672Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:468:2412] 2025-09-25T16:19:11.722698Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:11.722709Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. Count 0 2025-09-25T16:19:11.722714Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:11.722719Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:11.722728Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928138][Partition][1][StateIdle] Try persist 2025-09-25T16:19:11.722791Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928138][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:11.722926Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:471:2414], now have 1 active actors on pipe 2025-09-25T16:19:11.726590Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:11.727374Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:11.727431Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:11.727442Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:11.727451Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:11.727587Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:520:2451], now have 1 active actors on pipe 2025-09-25T16:19:11.727598Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:11.728008Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:11.728038Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:11.728156Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 12 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:11.728185Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:11.728250Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:11.728288Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:528:2457] 2025-09-25T16:19:11.728787Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:11.728797Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:11.728803Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:528:2457] 2025-09-25T16:19:11.728809Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:11.728817Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:11.728838Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:11.728847Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:11.728851Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:11.728939Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:11.729058Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:531:2459], now have 1 active actors on pipe 2025-09-25T16:19:11.729393Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:538:2462], now have 1 active actors on pipe 2025-09-25T16:19:11.729537Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:541:2463], now have 1 active actors on pipe 2025-09-25T16:19:11.729581Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [3:540:2463], now have 1 active actors on pipe 2025-09-25T16:19:11.729619Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:542:2463], now have 1 active actors on pipe 2025-09-25T16:19:11.729714Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:555:2474], now have 1 active actors on pipe 2025-09-25T16:19:11.733809Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:11.734465Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:11.734677Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:11.734687Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:11.734710Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:11.734772Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:11.734809Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:611:2519] 2025-09-25T16:19:11.735417Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:11.735682Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:11.735726Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:11.735777Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:11.735809Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:11.735815Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:11.735821Z node 3 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:11.735825Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:11.735831Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:11.735839Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:611:2519] 2025-09-25T16:19:11.735852Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:11.735863Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:11.735868Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:11.735873Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:11.735878Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:11.735971Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:11.736213Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [3:541:2463] destroyed 2025-09-25T16:19:11.736227Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [3:538:2462] destroyed 2025-09-25T16:19:11.736236Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928137] server disconnected, pipe [3:540:2463] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-09-25T16:19:12.832975Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.834790Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.834954Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:12.834976Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.834994Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:12.835299Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:261:2255], now have 1 active actors on pipe 2025-09-25T16:19:12.835368Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.838780Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.838827Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.839152Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.839206Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.839374Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.839467Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2261] 2025-09-25T16:19:12.840047Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.840057Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:12.840074Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2261] 2025-09-25T16:19:12.840083Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.840094Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:12.840099Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.840112Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.840116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.840195Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.840333Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:272:2263], now have 1 active actors on pipe 2025-09-25T16:19:12.855306Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.856200Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.856277Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-09-25T16:19:12.856289Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.856300Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928137] doesn't have tx writes info 2025-09-25T16:19:12.856462Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [2:401:2361], now have 1 active actors on pipe 2025-09-25T16:19:12.856491Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.857046Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.857075Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.857272Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928137] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.857297Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.857360Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.857393Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:409:2367] 2025-09-25T16:19:12.857956Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.857966Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-09-25T16:19:12.857972Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:409:2367] 2025-09-25T16:19:12.857980Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.857990Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-09-25T16:19:12.857995Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928137][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.857999Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928137][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.858003Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928137][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.858075Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928137][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.858181Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928137] server connected, pipe [2:412:2369], now have 1 active actors on pipe 2025-09-25T16:19:12.861396Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.862205Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.862272Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-09-25T16:19:12.862283Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.862293Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928138] doesn't have tx writes info 2025-09-25T16:19:12.862410Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [2:461:2406], now have 1 active actors on pipe 2025-09-25T16:19:12.862431Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.862879Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:12.862905Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.863084Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928138] Config applied version 3 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 
Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:12.863108Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.863169Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.863203Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:469:2412] 2025-09-25T16:19:12.863692Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.863707Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:12.863715Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:469:2412] 2025-09-25T16:19:12.863724Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.86373 ... onfig: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-09-25T16:19:13.482493Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.482549Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.482582Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [4:469:2413] 2025-09-25T16:19:13.483008Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:13.483013Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-09-25T16:19:13.483019Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:469:2413] 2025-09-25T16:19:13.483029Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.483039Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:13.483044Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.483065Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.483070Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928138][Partition][1][StateIdle] Try persist 2025-09-25T16:19:13.483147Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928138][Partition][1][StateIdle] no data for compaction 2025-09-25T16:19:13.483298Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [4:472:2415], now have 1 active actors on pipe 2025-09-25T16:19:13.487875Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:13.489044Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:13.489151Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:13.489168Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.489181Z node 4 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:13.489402Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:521:2452], now have 1 active actors on pipe 2025-09-25T16:19:13.489432Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:13.490054Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:13.490089Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.490256Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 12 actor [4:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:13.490290Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.490377Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.490425Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:529:2458] 2025-09-25T16:19:13.491041Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 
2025-09-25T16:19:13.491071Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:13.491081Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2458] 2025-09-25T16:19:13.491092Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.491105Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:13.491111Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.491117Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.491125Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:13.491228Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:13.491362Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:532:2460], now have 1 active actors on pipe 2025-09-25T16:19:13.491646Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [4:538:2463], now have 1 active actors on pipe 2025-09-25T16:19:13.491711Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [4:539:2464], now have 1 active actors on pipe 2025-09-25T16:19:13.491752Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:540:2464], now have 1 active actors on pipe 2025-09-25T16:19:13.505233Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [4:548:2471], now have 1 active actors on pipe 2025-09-25T16:19:13.516478Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:13.517440Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:13.517680Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:13.517691Z node 4 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:13.517715Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:13.517771Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:13.517811Z node 4 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:604:2516] 2025-09-25T16:19:13.518421Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:13.518694Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:13.518733Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:13.518784Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:13.518812Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:13.518817Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:13.518823Z node 4 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:13.518827Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:13.518833Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:13.518841Z node 4 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:604:2516] 2025-09-25T16:19:13.518850Z node 4 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:13.518864Z node 4 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:13.518870Z node 4 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:13.518875Z node 4 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:13.518880Z node 4 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:13.518953Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:13.519191Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [4:539:2464] destroyed 2025-09-25T16:19:13.519207Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [4:538:2463] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] Test command err: Trying to start YDB, gRPC: 12119, MsgBus: 23953 2025-09-25T16:18:47.621739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062043020158938:2160];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:47.621765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f24/r3tmp/tmptB2fK7/pdisk_1.dat 2025-09-25T16:18:47.660920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:47.668601Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12119, node 1 2025-09-25T16:18:47.685171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:47.685180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:47.685182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:47.685217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23953 2025-09-25T16:18:47.724036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:47.724071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:47.725101Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:47.742560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:47.752512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.776203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.797486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:47.811017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:47.928945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:47.994286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043020160450:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.994308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.994400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062043020160460:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:47.994412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.063189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.080194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.090652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.104435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.119218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.133722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.146817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.162687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:48.184002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062047315128618:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.184034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.184161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062047315128623:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.184188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062047315128624:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.184310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.185310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... ard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:25.354905Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23824, node 8 2025-09-25T16:19:25.360697Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:25.360706Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:25.360707Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:25.360734Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8843 TClient is connected to server localhost:8843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:25.407297Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:25.417117Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:25.426548Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:25.445472Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:25.455594Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:25.577294Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:25.633144Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062205997006365:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.633170Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.633236Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062205997006375:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.633244Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.642129Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.648962Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.658930Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.665897Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.672656Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.679995Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.694211Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.708638Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:25.724898Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062205997007238:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.724927Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.724935Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062205997007243:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.724959Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062205997007245:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.724964Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:25.725537Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:25.728343Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062205997007247:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:25.825593Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062205997007299:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:25.993670Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062205997007602:3752] txid# 281474976710673, issues: { message: "(NKikimr::NExternalSource::TExternalSourceException) It is not allowed to access hostname \'my-bucket\'" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-09-25T16:18:50.980187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062053759388409:2138];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:50.980228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005947/r3tmp/tmpdwGzAY/pdisk_1.dat 2025-09-25T16:18:51.089610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:51.090091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:51.090119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:51.090970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:51.110248Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062053759388309:2081] 1758817130979174 != 1758817130979177 2025-09-25T16:18:51.111560Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4207, node 1 2025-09-25T16:18:51.121074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:51.121090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:51.121092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:51.121137Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:51.150051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:51.157192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:21350 2025-09-25T16:18:51.201926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-09-25T16:18:51.203141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:18:51.204523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... waiting... 2025-09-25T16:18:51.236143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:51.263809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 
2025-09-25T16:18:51.273993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-09-25T16:18:51.286340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:51.326374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.350195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.363668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.368068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:51.375603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:51.389234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:51.403396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:51.415972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:51.527696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058054357005:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.527727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.527906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058054357017:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.527915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062058054357018:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.527976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:51.528849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:51.531478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-09-25T16:18:51.531683Z node 1 ... der_id='folder4' 2025-09-25T16:19:19.050623Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Request proxy started 2025-09-25T16:19:19.050645Z node 7 :SQS DEBUG: service.cpp:761: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-09-25T16:19:19.050662Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Get configuration duration: 0ms 2025-09-25T16:19:19.050680Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-09-25T16:19:19.050687Z node 7 :SQS DEBUG: service.cpp:581: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-09-25T16:19:19.050692Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Got leader node for queue response. Node id: 7. Status: 0 2025-09-25T16:19:19.050718Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" 2025-09-25T16:19:19.050731Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" 2025-09-25T16:19:19.050747Z node 7 :SQS DEBUG: action.h:133: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Request started. 
Actor: [7:7554062179663723847:5482] 2025-09-25T16:19:19.050760Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7554062179663723847:5482] 2025-09-25T16:19:19.050769Z node 7 :SQS DEBUG: service.cpp:754: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2025-09-25T16:19:19.058204Z node 7 :SQS TRACE: executor.cpp:286: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715934 Step: 1758817159106 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k14\":\"v\"}" } } } } } 2025-09-25T16:19:19.058220Z node 7 :SQS DEBUG: executor.cpp:287: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 8ms 2025-09-25T16:19:19.058316Z node 7 :SQS TRACE: executor.cpp:325: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715934 Step: 1758817159106 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: 
Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k14\":\"v\"}" } } } } } 2025-09-25T16:19:19.058339Z node 7 :SQS TRACE: executor.cpp:327: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{\"k14\":\"v\"}"} 2025-09-25T16:19:19.058371Z node 7 :SQS DEBUG: executor.cpp:401: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 9ms 2025-09-25T16:19:19.058385Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [45f7d97c-e3d795fb-fe374745-2d10ae0a] Sending executed reply 2025-09-25T16:19:19.058457Z node 7 :SQS DEBUG: action.h:627: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Get configuration duration: 7ms 2025-09-25T16:19:19.058466Z node 7 :SQS TRACE: action.h:647: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Got configuration. 
Root url: http://ghrun-v6cxduzo2m.auto.internal:8771, Shards: 1, Fail: 0 2025-09-25T16:19:19.058472Z node 7 :SQS TRACE: action.h:427: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] DoRoutine 2025-09-25T16:19:19.058497Z node 7 :SQS TRACE: action.h:264: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] SendReplyAndDie from action actor { ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } } 2025-09-25T16:19:19.058529Z node 7 :SQS TRACE: proxy_service.h:35: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Sending sqs response: { ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k14" Value: "v" } } 2025-09-25T16:19:19.058556Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k14" Value: "v" } 2025-09-25T16:19:19.058559Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7554062179663723847:5482]. Found: 1 2025-09-25T16:19:19.058575Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7554062179663723846:2763]: ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k14" Value: "v" } 2025-09-25T16:19:19.058641Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] HandleResponse: { ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k14" Value: "v" } }, status: OK 2025-09-25T16:19:19.058666Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Sending reply from proxy actor: { ListQueueTags { RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" Tags { Key: "k14" Value: "v" } } RequestId: "ba95f182-7f0ee117-b2e55d4a-1f9af6ba" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k14" Value: "v" } } 2025-09-25T16:19:19.058743Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:378: http request [ListQueueTags] requestId [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Got succesfult GRPC response. 2025-09-25T16:19:19.058789Z node 7 :HTTP_PROXY INFO: http_req.cpp:1205: http request [ListQueueTags] requestId [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] reply ok 2025-09-25T16:19:19.058829Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1265: http request [ListQueueTags] requestId [ba95f182-7f0ee117-b2e55d4a-1f9af6ba] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 173 SourceAddress: 9844:227a:2813:0:8044:227a:2813:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-09-25T16:19:19.058871Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#37,[::1]:56752) <- (200 , 20 bytes) 2025-09-25T16:19:19.058923Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:56752) connection closed Http output full {"Tags":{"k14":"v"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-09-25T16:19:30.265446Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:19:30.277645Z node 1 :PQ_TX INFO: pq_impl.cpp:560: [PQ: 72057594037927937] doesn't have tx info 2025-09-25T16:19:30.277681Z node 1 :PQ_TX INFO: pq_impl.cpp:572: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-09-25T16:19:30.277694Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:30.277702Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037927937] doesn't have tx writes info 2025-09-25T16:19:30.280170Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:30.280198Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-09-25T16:19:30.280213Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-09-25T16:19:30.280218Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-09-25T16:19:30.280229Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTransfer_QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28274, MsgBus: 18495 2025-09-25T16:18:52.600758Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062063050111157:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.600992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001d39/r3tmp/tmpFwHIPk/pdisk_1.dat 2025-09-25T16:18:52.666273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:52.666336Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28274, node 1 2025-09-25T16:18:52.687366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.687389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.687392Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.687440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:52.707621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.707668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.708765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18495 TClient is connected to server localhost:18495 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.771562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:52.775085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:18:52.789529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.826174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:52.868671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:52.873862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:52.896599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:53.117627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062067345080048:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.117657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.117841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062067345080058:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.117849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.181554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.202615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.215044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.245011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.262654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.270720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.286221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.321571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:18:53.346123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062067345080921:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.346152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.346266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062067345080926:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.346277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062067345080927:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.346332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:53.347213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... meshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976715694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:24.856536Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715694 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.856569Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715694 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.856671Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-09-25T16:19:24.862886Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:24.863159Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715695 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.863193Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715695 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" 
TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.863288Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-09-25T16:19:24.863372Z node 8 :REPLICATION_CONTROLLER TRACE: secret_resolver.cpp:27: [SecretResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/transfer TableId: [72057594046644480:20:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTransfer DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-09-25T16:19:24.863505Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:248: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-09-25T16:19:24.863516Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:21: [controller 72075186224037930][TxResolveSecretResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-09-25T16:19:24.863520Z node 8 :REPLICATION_CONTROLLER ERROR: tx_resolve_secret_result.cpp:45: [controller 72075186224037930][TxResolveSecretResult] Resolve secret error: rid# 1, error# No such secret: USId:root@builtin:password_secret_name 2025-09-25T16:19:24.863571Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:59: [controller 72075186224037930][TxResolveSecretResult] Complete 2025-09-25T16:19:24.869712Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976715696:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:24.870068Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715696 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.870100Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715696 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: 
"/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.870188Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-09-25T16:19:24.876903Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976715697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:24.877202Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715697 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.877233Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715697 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.877272Z node 8 :REPLICATION_CONTROLLER NOTICE: tx_alter_replication.cpp:128: [controller 72075186224037930][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 20] 2025-09-25T16:19:24.877346Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-09-25T16:19:24.877393Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:203: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvRequestCreateStream 2025-09-25T16:19:24.884242Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976715698:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-09-25T16:19:24.884651Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:148: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715698 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = 
($x) -> {\n RETURN CAST($x as String);\n };\n" DirectoryPath: "/Root" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.884687Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976715698 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" DirectoryPath: "/Root" } RunAsUser: "root@builtin" } } 2025-09-25T16:19:24.884724Z node 8 :REPLICATION_CONTROLLER NOTICE: tx_alter_replication.cpp:128: [controller 72075186224037930][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 20] 2025-09-25T16:19:24.884802Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-09-25T16:19:24.884869Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:203: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvRequestCreateStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-09-25T16:19:12.438468Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.439811Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.439908Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:12.439924Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.439937Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:12.440161Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:261:2255], now have 1 active actors on pipe 2025-09-25T16:19:12.440787Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.445200Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.445256Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.445573Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.445620Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.445782Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.445885Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2261] 2025-09-25T16:19:12.446580Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.446590Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-09-25T16:19:12.446611Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2261] 2025-09-25T16:19:12.446621Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.446636Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-09-25T16:19:12.446642Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.446659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.446664Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928037][Partition][0][StateIdle] Try persist 2025-09-25T16:19:12.446740Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928037][Partition][0][StateIdle] no data for compaction 2025-09-25T16:19:12.446902Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:272:2263], now have 1 active actors on pipe 2025-09-25T16:19:12.485774Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.487756Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.487859Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-09-25T16:19:12.487874Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.487886Z node 2 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:12.488113Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:402:2362], now have 1 active actors on pipe 2025-09-25T16:19:12.488126Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.488658Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:12.488686Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.488941Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928139] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-09-25T16:19:12.488971Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.489045Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.489082Z node 2 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:410:2368] 2025-09-25T16:19:12.489672Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.489681Z 
node 2 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-09-25T16:19:12.489689Z node 2 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:410:2368] 2025-09-25T16:19:12.489698Z node 2 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.489708Z node 2 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:12.489714Z node 2 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.489719Z node 2 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.489724Z node 2 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:12.489797Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:12.489917Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:413:2370], now have 1 active actors on pipe 2025-09-25T16:19:12.490186Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [2:419:2373], now have 1 active actors on pipe 2025-09-25T16:19:12.490258Z node 2 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:12.490288Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [2:421:2374], now have 1 active actors on pipe 2025-09-25T16:19:12.490368Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [2:419:2373] destroyed 2025-09-25T16:19:12.490395Z node 2 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:12.490472Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928139] server disconnected, pipe [2:421:2374] destroyed 2025-09-25T16:19:12.663322Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.664339Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.664433Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:784: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-09-25T16:19:12.664449Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.664461Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928037] doesn't have tx writes info 2025-09-25T16:19:12.664669Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:261:2256], now have 1 active actors on pipe 2025-09-25T16:19:12.664694Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1269: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-09-25T16:19:12.665315Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:395: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-09-25T16:19:12.665348Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.665481Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1296: [PQ: 72057594037928037] Config applied version 3 actor [3:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 36 ... before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.701457Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.701462Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:12.701544Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:12.701672Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:532:2460], now have 1 active actors on pipe 2025-09-25T16:19:12.701920Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928037] server connected, pipe [3:538:2463], now have 1 active actors on pipe 2025-09-25T16:19:12.701941Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928138] server connected, pipe [3:539:2464], now have 1 active actors on pipe 2025-09-25T16:19:12.702001Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:12.702044Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:12.702055Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] 
server connected, pipe [3:540:2464], now have 1 active actors on pipe 2025-09-25T16:19:12.702096Z node 3 :PERSQUEUE DEBUG: partition.cpp:997: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-09-25T16:19:12.712446Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037928139] server connected, pipe [3:548:2471], now have 1 active actors on pipe 2025-09-25T16:19:12.722196Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2957: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-09-25T16:19:12.723132Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2989: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-09-25T16:19:12.723370Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:12.723382Z node 3 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037928139] doesn't have tx writes info 2025-09-25T16:19:12.723407Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-09-25T16:19:12.723477Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-09-25T16:19:12.723514Z node 3 :PERSQUEUE INFO: partition_init.cpp:1075: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:604:2516] 2025-09-25T16:19:12.724121Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-09-25T16:19:12.724419Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-09-25T16:19:12.724474Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-09-25T16:19:12.724528Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-09-25T16:19:12.724566Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-09-25T16:19:12.724573Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-09-25T16:19:12.724580Z node 3 :PERSQUEUE INFO: partition_init.cpp:948: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-09-25T16:19:12.724584Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:80: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-09-25T16:19:12.724591Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:60: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-09-25T16:19:12.724599Z node 3 :PERSQUEUE INFO: partition.cpp:694: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:604:2516] 2025-09-25T16:19:12.724608Z node 3 :PERSQUEUE DEBUG: partition.cpp:708: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-09-25T16:19:12.724619Z node 3 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-09-25T16:19:12.724625Z node 3 :PERSQUEUE DEBUG: partition.cpp:2261: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:19:12.724631Z node 3 :PERSQUEUE DEBUG: partition.cpp:2270: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:19:12.724635Z node 3 :PERSQUEUE DEBUG: partition.cpp:2293: [72057594037928139][Partition][2][StateIdle] Try persist 2025-09-25T16:19:12.724684Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:162: [72057594037928139][Partition][2][StateIdle] no data for compaction 2025-09-25T16:19:12.724967Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928138] server disconnected, pipe [3:539:2464] destroyed 2025-09-25T16:19:12.725029Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72057594037928037] server disconnected, pipe [3:538:2463] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 
Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-09-25T16:18:49.765485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062050600247637:2133];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:49.765506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00599b/r3tmp/tmprDFCw7/pdisk_1.dat 2025-09-25T16:18:49.847424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:49.866101Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:49.866312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062050600247542:2081] 1758817129764805 != 1758817129764808 TServer::EnableGrpc on GrpcPort 26084, node 1 2025-09-25T16:18:49.871425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:49.871455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:49.872519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:49.875661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:49.875671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:49.875673Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:49.875709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:49.916903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:49.921271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:65463 2025-09-25T16:18:49.951984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... waiting... 2025-09-25T16:18:49.964649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:18:49.974635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:49.992682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.008967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:18:50.059478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.071181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.079557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.093959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.100757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:50.111052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:18:50.121359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.137374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:18:50.263594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054895216250:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.263663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054895216239:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.263681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.264264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062054895216254:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.264281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:50.264561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:18:50.267753Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062054895216253:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-09-25T16:18:50.360004Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062054895216306:2865] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], ... xy_actor.cpp:147: Request [3a7a0b13-c25554f7-db159b4-564d73ca] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "3a7a0b13-c25554f7-db159b4-564d73ca" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "3a7a0b13-c25554f7-db159b4-564d73ca" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-09-25T16:19:13.389428Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:378: http request [DeleteMessageBatch] requestId [3a7a0b13-c25554f7-db159b4-564d73ca] Got succesfult GRPC response. 2025-09-25T16:19:13.389479Z node 7 :HTTP_PROXY INFO: http_req.cpp:1205: http request [DeleteMessageBatch] requestId [3a7a0b13-c25554f7-db159b4-564d73ca] reply ok 2025-09-25T16:19:13.389531Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1265: http request [DeleteMessageBatch] requestId [3a7a0b13-c25554f7-db159b4-564d73ca] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 196 SourceAddress: d82c:7fa:ac52:0:c02c:7fa:ac52:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-09-25T16:19:13.389586Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#38,[::1]:47330) <- (200 , 44 bytes) 2025-09-25T16:19:13.389666Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#38,[::1]:47330) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-09-25T16:19:13.389723Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-09-25T16:19:13.389737Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 1ms 2025-09-25T16:19:13.389757Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-09-25T16:19:13.389761Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue 
[cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-09-25T16:19:13.389768Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 1ms 2025-09-25T16:19:13.389828Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2025-09-25T16:19:13.389847Z node 7 :SQS DEBUG: queue_leader.cpp:1913: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/0] 2025-09-25T16:19:13.390304Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:83: (#38,[::1]:47338) incoming connection opened 2025-09-25T16:19:13.390339Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:156: (#38,[::1]:47338) -> (POST /Root, 106 bytes) 2025-09-25T16:19:13.390405Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d82c:7fa:ac52:0:c02c:7fa:ac52:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 7630f4df-64346125-932289c1-b1e98171 2025-09-25T16:19:13.390527Z node 7 :HTTP_PROXY INFO: http_req.cpp:519: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] got new request from [d82c:7fa:ac52:0:c02c:7fa:ac52:0] 2025-09-25T16:19:13.390621Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:453: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-09-25T16:19:13.390633Z node 7 :HTTP_PROXY INFO: http_req.cpp:279: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] sending grpc request to '' database: '/Root' iam token size: 0 2025-09-25T16:19:13.390714Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 7630f4df-64346125-932289c1-b1e98171 2025-09-25T16:19:13.390743Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [7630f4df-64346125-932289c1-b1e98171] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-09-25T16:19:13.390751Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [7630f4df-64346125-932289c1-b1e98171] Request proxy started 2025-09-25T16:19:13.390806Z node 7 :SQS DEBUG: service.cpp:761: Request [7630f4df-64346125-932289c1-b1e98171] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-09-25T16:19:13.390848Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [7630f4df-64346125-932289c1-b1e98171] Get configuration duration: 0ms 2025-09-25T16:19:13.390901Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [7630f4df-64346125-932289c1-b1e98171] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-09-25T16:19:13.390915Z node 7 :SQS DEBUG: service.cpp:581: Request [7630f4df-64346125-932289c1-b1e98171] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-09-25T16:19:13.390921Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [7630f4df-64346125-932289c1-b1e98171] Got leader node for queue response. Node id: 7. 
Status: 0 2025-09-25T16:19:13.390958Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [7630f4df-64346125-932289c1-b1e98171] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "7630f4df-64346125-932289c1-b1e98171" 2025-09-25T16:19:13.390983Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [7630f4df-64346125-932289c1-b1e98171] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "7630f4df-64346125-932289c1-b1e98171" 2025-09-25T16:19:13.390996Z node 7 :SQS DEBUG: action.h:133: Request [7630f4df-64346125-932289c1-b1e98171] Request started. Actor: [7:7554062154414655447:3696] 2025-09-25T16:19:13.391006Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7554062154414655447:3696] 2025-09-25T16:19:13.391012Z node 7 :SQS DEBUG: service.cpp:754: Request [7630f4df-64346125-932289c1-b1e98171] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-09-25T16:19:13.391021Z node 7 :SQS DEBUG: action.h:627: Request [7630f4df-64346125-932289c1-b1e98171] Get configuration duration: 0ms 2025-09-25T16:19:13.391026Z node 7 :SQS TRACE: action.h:647: Request [7630f4df-64346125-932289c1-b1e98171] Got configuration. Root url: http://ghrun-v6cxduzo2m.auto.internal:8771, Shards: 4, Fail: 0 2025-09-25T16:19:13.391033Z node 7 :SQS TRACE: action.h:662: Request [7630f4df-64346125-932289c1-b1e98171] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-09-25T16:19:13.391040Z node 7 :SQS TRACE: action.h:427: Request [7630f4df-64346125-932289c1-b1e98171] DoRoutine 2025-09-25T16:19:13.391074Z node 7 :SQS TRACE: queue_leader.cpp:2424: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2025-09-25T16:19:13.391086Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [7630f4df-64346125-932289c1-b1e98171] Received empty result from shard 3 infly. Infly capacity: 0. Messages count: 0 2025-09-25T16:19:13.391090Z node 7 :SQS DEBUG: queue_leader.cpp:1162: Request [7630f4df-64346125-932289c1-b1e98171] No known messages in this shard. Skip attempt to add messages to infly 2025-09-25T16:19:13.391092Z node 7 :SQS DEBUG: queue_leader.cpp:1168: Request [7630f4df-64346125-932289c1-b1e98171] Already tried to add messages to infly 2025-09-25T16:19:13.391098Z node 7 :SQS TRACE: queue_leader.cpp:2434: Decrement active message requests for [[cloud4/000000000000000101v0/3]. 
ActiveMessageRequests: 0 2025-09-25T16:19:13.391117Z node 7 :SQS TRACE: action.h:264: Request [7630f4df-64346125-932289c1-b1e98171] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } } 2025-09-25T16:19:13.391139Z node 7 :SQS TRACE: proxy_service.h:35: Request [7630f4df-64346125-932289c1-b1e98171] Sending sqs response: { ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } RequestId: "7630f4df-64346125-932289c1-b1e98171" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-09-25T16:19:13.391189Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 0ms 2025-09-25T16:19:13.391211Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } RequestId: "7630f4df-64346125-932289c1-b1e98171" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-09-25T16:19:13.391227Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7554062154414655446:2531]: ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } RequestId: "7630f4df-64346125-932289c1-b1e98171" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-09-25T16:19:13.391240Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7554062154414655447:3696]. Found: 1 2025-09-25T16:19:13.391290Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [7630f4df-64346125-932289c1-b1e98171] HandleResponse: { ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } RequestId: "7630f4df-64346125-932289c1-b1e98171" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-09-25T16:19:13.391307Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [7630f4df-64346125-932289c1-b1e98171] Sending reply from proxy actor: { ReceiveMessage { RequestId: "7630f4df-64346125-932289c1-b1e98171" } RequestId: "7630f4df-64346125-932289c1-b1e98171" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-09-25T16:19:13.391392Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:378: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] Got succesfult GRPC response. 2025-09-25T16:19:13.391410Z node 7 :HTTP_PROXY INFO: http_req.cpp:1205: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] reply ok 2025-09-25T16:19:13.391456Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1265: http request [ReceiveMessage] requestId [7630f4df-64346125-932289c1-b1e98171] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 154 SourceAddress: d82c:7fa:ac52:0:c02c:7fa:ac52:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-09-25T16:19:13.391488Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:280: (#38,[::1]:47338) <- (200 , 2 bytes) 2025-09-25T16:19:13.391531Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#38,[::1]:47338) connection closed Http output full {} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CleanupTemporaryTables [GOOD] Test command err: Trying to start YDB, gRPC: 2627, MsgBus: 9387 2025-09-25T16:19:06.616498Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062123867157256:2083];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:06.617106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001b99/r3tmp/tmpiYt392/pdisk_1.dat 2025-09-25T16:19:06.684946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:06.704692Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2627, node 1 2025-09-25T16:19:06.723538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:06.723573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:06.725554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:06.733391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:06.733403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:06.733406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:06.733462Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9387 TClient is connected to server localhost:9387 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:06.848197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:06.854811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:06.867048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:06.906637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:06.948629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:06.965073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:06.972308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:07.116681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062128162126131:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.116707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.116987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062128162126141:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.116997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.192046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.203211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.217270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.230375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.246556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.261795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.278532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.307649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:07.353163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7554062128162127002:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.353198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.353904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062128162127007:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.353918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062128162127008:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.353929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:07.355061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_ ... TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:17.054133Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:17.073953Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.090278Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.115684Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:17.129586Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:17.255606Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:17.361242Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062171373103223:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.361271Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.361386Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062171373103232:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.361400Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.375460Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.384020Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.392332Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.406392Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.421189Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.434857Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.448618Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.463876Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:17.479423Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062171373104095:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.479446Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.479458Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062171373104100:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.479484Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062171373104102:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.479494Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:17.480126Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:17.489715Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062171373104104:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:17.567120Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062171373104156:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:17.962874Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:21.961063Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7554062167078134328:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:21.961101Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-09-25T16:19:22.815210Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__background_cleaning.cpp:196: Out of txIds for temp dir# /Root/first_dir. Background cleaning will be finished later. 2025-09-25T16:19:23.815709Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |81.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollection [GOOD] Test command err: Trying to start YDB, gRPC: 26799, MsgBus: 21027 2025-09-25T16:19:01.137025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062103753208318:2083];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:01.137276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001cad/r3tmp/tmpNrTdne/pdisk_1.dat 2025-09-25T16:19:01.204213Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:01.204242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:01.209500Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062103753208261:2081] 1758817141136458 != 1758817141136461 TServer::EnableGrpc on GrpcPort 26799, node 1 2025-09-25T16:19:01.232979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:01.232992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:01.232994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:01.233044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:19:01.247386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:01.247413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:21027 2025-09-25T16:19:01.248485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:19:01.394686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:01.399852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:01.404095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... waiting... 2025-09-25T16:19:01.438589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:01.468383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:01.487706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:01.498540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:01.983298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062103753209929:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.983331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.983492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062103753209939:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:01.983504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.059419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.070426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.080032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.091463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.109129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.125324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.139663Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:02.190818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.215234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:02.242271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062108048178109:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.242315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.242420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062108048178115:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.242430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:02.242441Z node ... UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:15.764652Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:15.774208Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.791162Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:15.791196Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:15.792366Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:15.833132Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.854388Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:15.865374Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:19:15.953264Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:16.062352Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062167440136665:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.062382Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.062464Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062167440136675:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.062476Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.072360Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.080443Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.090510Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.106472Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.118638Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.133631Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.148452Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.161682Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:16.179193Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7554062167440137536:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.179231Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062167440137541:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.179231Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.179289Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7554062167440137544:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.179306Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:16.179882Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:16.187983Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7554062167440137543:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:16.268036Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062167440137597:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:16.497804Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7554062167440137899:3750] txid# 281474976710673, issues: { message: "Backup collections must be placed in /Root/.backups/collections" severity: 1 } 2025-09-25T16:19:16.505323Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976710674:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp:1251) 2025-09-25T16:19:16.517650Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp:186) 2025-09-25T16:19:16.691168Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] Test command err: 2025-09-25T16:18:52.341103Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062065187723219:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.341136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:52.411637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001dda/r3tmp/tmpQChLW1/pdisk_1.dat 2025-09-25T16:18:52.465816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.465841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.470898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:52.473140Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24320, node 1 2025-09-25T16:18:52.506949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:52.512618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:52.512630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:52.512632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:52.512674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:52.539482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:18:52.871395Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-09-25T16:18:52.871486Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-09-25T16:18:52.871496Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-09-25T16:18:52.872174Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:222: SessionId: ydb://session/3?node_id=1&id=ZmQ0ZGEwMjktOWE0YmVjZTEtY2ViOWVmMi1hNjMzYTdkOQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmQ0ZGEwMjktOWE0YmVjZTEtY2ViOWVmMi1hNjMzYTdkOQ== 2025-09-25T16:18:52.872311Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7554062065187723984:2318], Start check tables existence, number paths: 2 2025-09-25T16:18:52.875007Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7554062065187723984:2318], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-09-25T16:18:52.875026Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7554062065187723984:2318], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-09-25T16:18:52.875031Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7554062065187723984:2318], Successfully finished 2025-09-25T16:18:52.875043Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:226: SessionId: ydb://session/3?node_id=1&id=ZmQ0ZGEwMjktOWE0YmVjZTEtY2ViOWVmMi1hNjMzYTdkOQ==, ActorId: [1:7554062065187723986:2320], ActorState: unknown state, session actor bootstrapped 2025-09-25T16:18:52.875055Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-09-25T16:18:52.876463Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-09-25T16:18:52.885324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-09-25T16:18:52.896944Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7554062061862183566:2259];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:52.896967Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:52.898004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.898033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.899127Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-09-25T16:18:52.899431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:52.908377Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:52.908411Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:52.908572Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908624Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908635Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908649Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908661Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908676Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908693Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908702Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.908709Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:18:52.909912Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:52.943108Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:52.946441Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2025-09-25T16:18:52.947562Z node 3 :STATISTICS WARN: tx_init.cpp:288: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false TServer::EnableGrpc on GrpcPort 63363, node 3 2025-09-25T16:18:53.115742Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:53.115757Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:53.115759Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:53.115801Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:53.130458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-09-25T16:18:53.142259Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062069280089995:2155];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:53.142298Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:53.149199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:53.149225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:53.150882Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:18:53.151366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:53.182132Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, ... 851068:2951] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-09-25T16:19:18.746770Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:263: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ReadyState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, request placed into pool from cache: default 2025-09-25T16:19:18.746791Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:629: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ExecuteState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, Sending CompileQuery request 2025-09-25T16:19:18.748985Z node 15 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][15:7554062169640882254:2996][/Root/test-shared/.metadata/secrets/values] Sync is incomplete in one of the ring groups: cookie# 38 2025-09-25T16:19:18.749005Z node 15 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][15:7554062169640882254:2996][/Root/test-shared/.metadata/secrets/values] Sync is incomplete in one of the ring groups: cookie# 39 2025-09-25T16:19:18.749230Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [15:7554062173935851070:2952], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-09-25T16:19:18.749296Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ExecuteState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-09-25T16:19:18.749313Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ExecuteState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:18.749317Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ExecuteState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, EndCleanup, isFinal: 0 2025-09-25T16:19:18.749354Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2518: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ExecuteState, TraceId: 01k60tr1jt59ny9h3x90mrgtsp, Sent query response back to proxy, proxyRequestId: 70, proxyId: [15:7554062156755978848:2266] 2025-09-25T16:19:18.749547Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-09-25T16:19:18.749605Z node 15 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-09-25T16:19:18.749633Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2563: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ReadyState, Session closed due to explicit close event 2025-09-25T16:19:18.749641Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:18.749644Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-09-25T16:19:18.749647Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2798: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: unknown state, Cleanup temp tables: 0 2025-09-25T16:19:18.749663Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2889: SessionId: ydb://session/3?node_id=15&id=ZmU5ODhhODktNjZmOTM3MTUtMWJhOGY4OGQtYTQ4YTQ5Yzc=, ActorId: [15:7554062173935851067:2950], ActorState: unknown state, Session actor destroyed 2025-09-25T16:19:18.864052Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:222: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE= 2025-09-25T16:19:18.864133Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:226: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: unknown state, session actor bootstrapped 2025-09-25T16:19:18.864283Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:442: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ReadyState, TraceId: 01k60tr1pg4dg57d6735pq74ky, received request, proxyRequestId: 72 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/secrets/values`; rpcActor: [15:7554062173935851086:2958] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-09-25T16:19:18.864295Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:263: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ReadyState, TraceId: 01k60tr1pg4dg57d6735pq74ky, request placed into pool from cache: default 2025-09-25T16:19:18.864317Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:629: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ExecuteState, TraceId: 01k60tr1pg4dg57d6735pq74ky, Sending CompileQuery request 2025-09-25T16:19:18.866566Z node 15 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][15:7554062169640882254:2996][/Root/test-shared/.metadata/secrets/values] Sync is incomplete in one of the ring groups: cookie# 40 2025-09-25T16:19:18.866588Z node 15 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: 
[main][15:7554062169640882254:2996][/Root/test-shared/.metadata/secrets/values] Sync is incomplete in one of the ring groups: cookie# 41 2025-09-25T16:19:18.866799Z node 15 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [15:7554062173935851088:2959], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-09-25T16:19:18.866871Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ExecuteState, TraceId: 01k60tr1pg4dg57d6735pq74ky, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-09-25T16:19:18.866889Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ExecuteState, TraceId: 01k60tr1pg4dg57d6735pq74ky, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:18.866892Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ExecuteState, TraceId: 01k60tr1pg4dg57d6735pq74ky, EndCleanup, isFinal: 0 2025-09-25T16:19:18.866928Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2518: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ExecuteState, TraceId: 01k60tr1pg4dg57d6735pq74ky, Sent query response back to proxy, proxyRequestId: 72, proxyId: [15:7554062156755978848:2266] 2025-09-25T16:19:18.867150Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-09-25T16:19:18.867221Z node 15 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2025-09-25T16:19:18.867238Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2563: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ReadyState, Session closed due to explicit close event 2025-09-25T16:19:18.867243Z node 15 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:18.867246Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-09-25T16:19:18.867249Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2798: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: unknown state, Cleanup temp tables: 0 2025-09-25T16:19:18.867265Z node 15 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2889: SessionId: ydb://session/3?node_id=15&id=YTk1NzA1YmMtNjNjOTRmNmItOTdlZjMwMmItMjRiZTUyMDE=, ActorId: [15:7554062173935851085:2957], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AddChangefeedNegative [GOOD] Test command err: Trying to start YDB, gRPC: 11262, MsgBus: 12946 2025-09-25T16:18:48.083676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062048516985438:2247];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:48.083695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:48.088987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001f14/r3tmp/tmpmDCgNB/pdisk_1.dat 2025-09-25T16:18:48.127290Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:48.127532Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062048516985220:2081] 1758817128081468 != 1758817128081471 TServer::EnableGrpc on GrpcPort 11262, node 1 2025-09-25T16:18:48.149680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:48.149694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:48.149697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:48.149730Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12946 2025-09-25T16:18:48.185646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:48.196485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:48.196522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:48.197513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:48.209838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, timestamp Timestamp NOT NULL, ui64_type Uint64 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-09-25T16:18:48.462426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048516985901:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.462457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.462583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062048516985911:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.462589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:18:48.512253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-09-25T16:18:48.525355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:18:48.525414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:18:48.525470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:18:48.525504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:18:48.525539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:18:48.525570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:18:48.525595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:18:48.525615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:18:48.525638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:18:48.525664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:18:48.525691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:18:48.525715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:18:48.525744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7554062048516985964:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:18:48.526959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:18:48.526980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:18:48.527001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:18:48.527008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:18:48.527058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:18:48.527072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:18:48.527085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:18:48.527092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:18:48.527107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:18:48.527113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:18:48.527127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;proc ... urityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:18.477381Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:18.487187Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:18.496584Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:18.496624Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:18.497720Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:18.545023Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:18.566115Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:18.576698Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 2025-09-25T16:19:18.629830Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:18.759101Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7554062175628538586:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.759128Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.759152Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7554062175628538596:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.759156Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.766638Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.773801Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.784872Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.799254Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.813260Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.827147Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.834205Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.848461Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:18.864813Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[20:7554062175628539459:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.864852Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7554062175628539464:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.864856Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.864894Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7554062175628539467:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.864903Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:18.865434Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:18.867835Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7554062175628539466:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-09-25T16:19:18.922788Z node 20 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [20:7554062175628539520:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:19.109494Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:19.130152Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:19.140904Z node 20 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [20:7554062179923507290:3857] txid# 281474976710675, issues: { message: "DYNAMODB_STREAMS_JSON format incompatible with specified stream mode" severity: 1 } 2025-09-25T16:19:19.145896Z node 20 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [20:7554062179923507307:3865] txid# 281474976710676, issues: { message: "DEBEZIUM_JSON format incompatible with specified stream mode" severity: 1 } >> TPQCachingProxyTest::MultipleSessions [GOOD] |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-09-25T16:19:30.988446Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:19:31.008353Z node 1 :PQ_TX INFO: pq_impl.cpp:560: [PQ: 72057594037927937] doesn't have tx info 2025-09-25T16:19:31.008401Z node 1 :PQ_TX INFO: pq_impl.cpp:572: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-09-25T16:19:31.008425Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:31.008438Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037927937] doesn't have tx writes info 2025-09-25T16:19:31.012535Z node 1 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:31.012567Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-09-25T16:19:31.012591Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-09-25T16:19:31.012611Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2025-09-25T16:19:31.012619Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-09-25T16:19:31.012632Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-09-25T16:19:31.012641Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2025-09-25T16:19:31.012649Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2025-09-25T16:19:31.012654Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 >> KqpRbo::CrossFilter >> TPQCachingProxyTest::TestDeregister |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> KqpProxy::NoLocalSessionExecution >> KqpRbo::Bench_Filter >> TableCreation::ConcurrentTableCreation >> KqpRbo::LeftJoinToKqpOpJoin >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> ScriptExecutionsTest::RunCheckLeaseStatus >> KqpProxy::InvalidSessionID >> TableCreation::SimpleTableCreation >> TableCreation::MultipleTablesCreation >> TPQCachingProxyTest::TestDeregister [GOOD] |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> KqpRbo::PredicatePushdownLeftJoin >> KqpRbo::JoinFilter |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |81.9%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> ScriptExecutionsTest::BackgroundOperationRestart >> KqpRbo::Select |82.0%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-09-25T16:19:31.819250Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-09-25T16:19:31.837131Z node 1 :PQ_TX INFO: pq_impl.cpp:560: [PQ: 72057594037927937] doesn't have tx info 2025-09-25T16:19:31.837178Z node 1 :PQ_TX INFO: pq_impl.cpp:572: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-09-25T16:19:31.837200Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:908: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-09-25T16:19:31.837213Z node 1 :PERSQUEUE INFO: pq_impl.cpp:609: [PQ: 72057594037927937] doesn't have tx writes info 2025-09-25T16:19:31.841125Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:31.841161Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-09-25T16:19:31.841172Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2025-09-25T16:19:31.841195Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpWithMultipleTables >> KqpRbo::Filter >> KqpRbo::Bench_CrossFilter >> TIncrementalRestoreTests::TxProgressExecutionWithCorrectBackupCollectionPathId >> KqpProxy::PassErrroViaSessionActor >> TCmsTest::CollectInfo >> KqpRbo::Bench_Select >> KqpRbo::Bench_JoinFilter >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpBasic >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInternalStateVerification >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInvalidPath >> TCmsTest::StateStorageNodesFromOneRing >> TIncrementalRestoreTests::IncrementalRestoreCompleteLifecycle >> KqpRbo::CrossFilter [GOOD] >> TIncrementalRestoreTests::CopyTableChangeStateSupport >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiMultipleOperationsListing |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> 
TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInvalidPath [GOOD] >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle >> KqpRbo::LeftJoinToKqpOpJoin [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> KqpRbo::Bench_Filter [GOOD] >> KqpRbo::Select [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpRbo::JoinFilter [GOOD] >> KqpProxy::NodeDisconnectedTest >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::PingNotExistedSession >> KqpRbo::PredicatePushdownLeftJoin [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::CrossFilter [GOOD] Test command err: Trying to start YDB, gRPC: 4024, MsgBus: 24131 2025-09-25T16:19:31.801327Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062232703860028:2083];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:31.801344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004322/r3tmp/tmpWvcMii/pdisk_1.dat 2025-09-25T16:19:31.843668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:31.854624Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4024, node 1 2025-09-25T16:19:31.870544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:31.870556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:31.870558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:31.870605Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24131 TClient is connected to server localhost:24131 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:19:31.907751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:31.907796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:31.908939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:31.918518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.005965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.246820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236998827943:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.246850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.246985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236998827953:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.247010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.302637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.319849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.335555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236998828117:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.335588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.335641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236998828122:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.335654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236998828123:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.335674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.336547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.338415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062236998828126:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:19:32.404574Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062236998828177:2444] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpBasic [GOOD] >> TIncrementalRestoreTests::CopyTableChangeStateSupport [GOOD] >> KqpRbo::Filter [GOOD] >> KqpRbo::Bench_Select [GOOD] >> TableCreation::ConcurrentTableCreation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInvalidPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.576420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.576442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.576448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.576453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.576459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.576464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.576473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.576486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.576592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.576640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.591333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.591352Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: 
Table profiles were not loaded 2025-09-25T16:19:32.594115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.594177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.594201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.595227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.595270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.595352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.595395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.595690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.595718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.595895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.595901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.595915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.595920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.595925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.595946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.596941Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.616914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.616994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.617057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.617066Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.617125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.617141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.617844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.617900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.617952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.617971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.617977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.617983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.618383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.618393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.618397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.618697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.618706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.618710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.618715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.619368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.619713Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.619761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.619934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.619955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.619960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.620010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.620015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.620038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.620047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.620417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.620424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
9-25T16:19:32.637713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-09-25T16:19:32.637735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:19:32.637742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:19:32.637846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:19:32.638287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:19:32.638351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-09-25T16:19:32.638634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.638642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:19:32.638665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:19:32.638680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.638685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-09-25T16:19:32.638691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 5 2025-09-25T16:19:32.638702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.638708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-09-25T16:19:32.638720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:19:32.638724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:19:32.638729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:19:32.638733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:19:32.638738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 
2025-09-25T16:19:32.638743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:19:32.638748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-09-25T16:19:32.638753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 104:0 2025-09-25T16:19:32.638764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:19:32.638770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-09-25T16:19:32.638774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2025-09-25T16:19:32.638778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-09-25T16:19:32.638947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:19:32.638961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:19:32.638967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-09-25T16:19:32.638972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-09-25T16:19:32.638977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:32.639118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:19:32.639133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:19:32.639138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-09-25T16:19:32.639143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-09-25T16:19:32.639148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 
2025-09-25T16:19:32.639159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-09-25T16:19:32.639871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:19:32.639944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-09-25T16:19:32.639990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-09-25T16:19:32.639997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-09-25T16:19:32.640064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-09-25T16:19:32.640081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:32.640085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:399:2389] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-09-25T16:19:32.640809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/NotABackupDir/" OperationType: ESchemeOpRestoreBackupCollection RestoreBackupCollection { Name: "TestCollection" } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.640913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/NotABackupDir/TestCollection', error: path is not a backup collection (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp:364, at schemeshard: 72057594046678944 2025-09-25T16:19:32.640923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/NotABackupDir/TestCollection', error: path is not a backup collection (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp:364, at schemeshard: 72057594046678944 2025-09-25T16:19:32.641420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/NotABackupDir/TestCollection\', error: path is not a backup collection (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp:364" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.641472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/NotABackupDir/TestCollection', error: path is not a backup collection (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp:364, operation: RESTORE, path: /MyRoot/NotABackupDir//TestCollection TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-09-25T16:19:32.641540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-09-25T16:19:32.641548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-09-25T16:19:32.641616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-09-25T16:19:32.641634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-09-25T16:19:32.641639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:406:2396] TestWaitNotification: OK eventTxId 105 >> TableCreation::ConcurrentMultipleTablesCreation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::LeftJoinToKqpOpJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2125, MsgBus: 11402 2025-09-25T16:19:31.974396Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062232286800252:2177];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:31.974473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:31.976146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004356/r3tmp/tmpQMErwZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2125, node 1 2025-09-25T16:19:32.012237Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.026821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.026840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.026843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.026894Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11402 2025-09-25T16:19:32.074334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.074359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.075424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.077472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.093008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.095298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.447221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236581768047:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.447277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.447611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236581768057:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.447619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.452915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.473249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.481887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.495063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.517030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236581768361:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.517077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.517089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236581768366:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.517140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236581768368:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.517149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.518138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.521241Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062236581768369:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-09-25T16:19:32.601832Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062236581768421:2533] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpWithMultipleTables [GOOD] >> KqpRbo::Bench_CrossFilter [GOOD] >> KqpRbo::Bench_10Joins >> TIncrementalRestoreTests::TxProgressExecutionWithCorrectBackupCollectionPathId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_Filter [GOOD] Test command err: Trying to start YDB, gRPC: 61340, MsgBus: 7548 2025-09-25T16:19:31.967677Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062230075837055:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:31.967758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004343/r3tmp/tmpTw3ZBu/pdisk_1.dat 2025-09-25T16:19:32.006679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.016109Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61340, node 1 2025-09-25T16:19:32.032108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.032122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.032125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.032176Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7548 2025-09-25T16:19:32.068050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.068081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.069111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7548 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-09-25T16:19:32.097775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.215098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.435031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234370804892:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.435095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.436971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234370804902:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.437004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.440464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.463494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234370804996:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.463525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234370805001:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.463530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.463663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234370805004:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.463698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.464415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.466523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062234370805003:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:19:32.541926Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062234370805056:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInternalStateVerification [GOOD] >> TableCreation::SimpleTableCreation [GOOD] >> KqpRbo::Bench_JoinFilter [GOOD] >> TableCreation::SimpleUpdateTable >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiGetListForget ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::JoinFilter [GOOD] >> TableCreation::MultipleTablesCreation [GOOD] Test command err: Trying to start YDB, gRPC: 18039, MsgBus: 18405 2025-09-25T16:19:32.250002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062233600213551:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.250192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00437f/r3tmp/tmpVAyFIz/pdisk_1.dat 2025-09-25T16:19:32.297734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.306994Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18039, node 1 2025-09-25T16:19:32.325445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.325460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.325462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.325511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18405 2025-09-25T16:19:32.352808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.352855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.353858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18405 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.392252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.496877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.764001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233600214174:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.764040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.766048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233600214184:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.766076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.802518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.821469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.832344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233600214348:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.832377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.832387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233600214353:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.832414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233600214355:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.832424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.833158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.840871Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062233600214356:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-09-25T16:19:32.923720Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062233600214408:2442] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:32.940290Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037889 Cancelled read: {[1:7554062233600214455:2351], 0} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Select [GOOD] Test command err: Trying to start YDB, gRPC: 10441, MsgBus: 25027 2025-09-25T16:19:32.391389Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062233714154721:2149];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.391412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00432c/r3tmp/tmpj3lNqD/pdisk_1.dat 2025-09-25T16:19:32.439217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 10441, node 1 2025-09-25T16:19:32.451716Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.461309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.461329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.461331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.461378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25027 2025-09-25T16:19:32.492750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.492780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.493981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.539067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.543191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.625442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.817910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233714155273:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.817913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233714155262:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.817933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.818030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233714155277:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.818053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.818841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.820762Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062233714155276:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:19:32.890381Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062233714155329:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::PredicatePushdownLeftJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2447, MsgBus: 64219 2025-09-25T16:19:32.220580Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062236007925450:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.220819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00438f/r3tmp/tmpWd2369/pdisk_1.dat 2025-09-25T16:19:32.256870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.279280Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2447, node 1 2025-09-25T16:19:32.290998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.291013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.291015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.291063Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64219 2025-09-25T16:19:32.321831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.321862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.324011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64219 WaitRootIsUp 'Root'... 
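
[Editorial aside, not part of the captured log.] The timestamped records above share a fixed shape: "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <source location>: <message>", for example the repeated KQP_WORKLOAD_SERVICE WARN entries about the missing "default" resource pool and the TX_PROXY ERROR about the already-existing pool path. A minimal triage sketch, again assuming the output is available as a plain-text file (ya_test_output.txt is a placeholder name), that parses such records and counts them by component and level:

import re
from collections import Counter

# Matches records like:
# 2025-09-25T16:19:32.817910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: ...
RECORD_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node\s+(?P<node>\d+)\s+"
    r":(?P<component>[A-Z_]+)\s+(?P<level>[A-Z]+):"
)

def tally(text: str) -> Counter:
    """Count log records per (component, level), e.g. ('KQP_WORKLOAD_SERVICE', 'WARN')."""
    return Counter(
        (m.group("component"), m.group("level")) for m in RECORD_RE.finditer(text)
    )

if __name__ == "__main__":
    # Hypothetical input path; point it at the captured test output.
    with open("ya_test_output.txt", encoding="utf-8") as f:
        for (component, level), count in tally(f.read()).most_common(10):
            print(f"{count:5d}  {level:6} {component}")
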
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.354401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.477768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.689681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236007926058:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.689711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.689832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236007926068:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.689842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.734654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.751415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.761562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236007926232:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.761583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.761610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236007926237:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.761617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236007926238:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.761622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.762301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.770388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062236007926241:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-09-25T16:19:32.840457Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062236007926292:2442] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TableCreation::ConcurrentUpdateTable >> TIncrementalRestoreTests::IncrementalRestoreCompleteLifecycle [GOOD] >> TCmsTest::StateStorageNodesFromOneRing [GOOD] |82.0%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CopyTableChangeStateSupport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.744952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.744976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.744982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.744987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.744993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.744998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.745007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.745023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.745130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.745183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.758051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.758069Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.761285Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.761380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.761407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.763075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.763137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.763257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.763312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.766170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.766233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.766524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.766537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.766561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.766571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.766578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.766616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.768411Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.788188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.788258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.788304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.788310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for 
txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.788361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.788377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.789038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.789112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.789162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.789182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.789188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.789193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.789637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.789648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.789655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.790026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.790037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.790043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.790049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.790543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.790971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.791015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.791196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.791218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.791223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.791290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.791295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.791316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.791325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.791871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.791883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 333 RawX2: 4294969615 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-09-25T16:19:32.978066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:32.978092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 333 RawX2: 4294969615 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-09-25T16:19:32.978101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:19:32.978112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 333 RawX2: 4294969615 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-09-25T16:19:32.978129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 129 -> 240 2025-09-25T16:19:32.978685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.978739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 103:0ProgressState, operation type TxCopyTable 2025-09-25T16:19:32.978749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1079: Set barrier, OperationId: 103:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-09-25T16:19:32.978755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-09-25T16:19:32.978767Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-09-25T16:19:32.978773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 240 -> 240 2025-09-25T16:19:32.979236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.979250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-09-25T16:19:32.979265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:32.979271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:32.979277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:32.979280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:32.979286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-09-25T16:19:32.979301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:361:2339] message: TxId: 103 2025-09-25T16:19:32.979309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:32.979316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-09-25T16:19:32.979324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 103:0 2025-09-25T16:19:32.979352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:32.979358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:32.979729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:32.979740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:467:2426] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:32.979858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/dst1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:32.979929Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/dst1" took 79us result status StatusSuccess 2025-09-25T16:19:32.980190Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/dst1" PathDescription { Self { Name: "dst1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateIncomingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.502556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.502581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.502586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.502592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.502598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.502603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.502612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.502626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.502726Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.502782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.521105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.521128Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.525426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.525520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.525554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.527427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.527521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.527669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.527760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.528396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.528443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.528723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.528735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.528759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.528767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.528774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.528807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.532875Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.554406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:19:32.554485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.554578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.554589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.554650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.554664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.555758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.555817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.555877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.555897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.555904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.555910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.556394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.556408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.556416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.556803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.556815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.556837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.556846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.557569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.558343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.558404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.558634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.558664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.558672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.558746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.558755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.558787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.558799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.559321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.559331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
e 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710657:0 expects 1 shards 2025-09-25T16:19:33.145424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710657:0 for incremental restore 113 2025-09-25T16:19:33.145427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/Table1 2025-09-25T16:19:33.145434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table2 -> /MyRoot/Table2 2025-09-25T16:19:33.145439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710658:0 expects 1 shards 2025-09-25T16:19:33.145442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710658:0 for incremental restore 113 2025-09-25T16:19:33.145444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/Table2 2025-09-25T16:19:33.145447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-09-25T16:19:33.146132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/MultiTableCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table1" DstTablePath: "/MyRoot/Table1" } } TxId: 281474976710657 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.146163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710657:0, tx# WorkingDir: "/MyRoot/.backups/collections/MultiTableCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table1" DstTablePath: "/MyRoot/Table1" } 2025-09-25T16:19:33.146202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710657:0, srcs# [/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table1], dst# /MyRoot/Table1 2025-09-25T16:19:33.146224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2025-09-25T16:19:33.146228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 2 2025-09-25T16:19:33.146235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710657:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, 
LocalPathId: 9] source path: [OwnerId: 72057594046678944, LocalPathId: 14] 2025-09-25T16:19:33.146239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710657:0 workingDir# /MyRoot/.backups/collections/MultiTableCollection dstTablePath# /MyRoot/Table1 pathId# [OwnerId: 72057594046678944, LocalPathId: 14] 2025-09-25T16:19:33.146248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.146809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/MultiTableCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table2" DstTablePath: "/MyRoot/Table2" } } TxId: 281474976710658 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.146838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710658:0, tx# WorkingDir: "/MyRoot/.backups/collections/MultiTableCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table2" DstTablePath: "/MyRoot/Table2" } 2025-09-25T16:19:33.146866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710658:0, srcs# [/MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table2], dst# /MyRoot/Table2 2025-09-25T16:19:33.146886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-09-25T16:19:33.146890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 2 2025-09-25T16:19:33.146896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710658:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 10] source path: [OwnerId: 72057594046678944, LocalPathId: 15] 2025-09-25T16:19:33.146901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710658:0 workingDir# /MyRoot/.backups/collections/MultiTableCollection dstTablePath# /MyRoot/Table2 pathId# [OwnerId: 72057594046678944, LocalPathId: 15] 2025-09-25T16:19:33.146909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.147510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 113 2025-09-25T16:19:33.147833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: 
Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.147873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table1, dst path: /MyRoot/Table1 2025-09-25T16:19:33.147913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710657, status# StatusAccepted 2025-09-25T16:19:33.147921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944 2025-09-25T16:19:33.147930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 113 tablet: 72057594046678944 2025-09-25T16:19:33.147948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=2, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:33.147988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.147995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710657:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:33.148076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.148098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/MultiTableCollection/backup_002_incremental/Table2, dst path: /MyRoot/Table2 2025-09-25T16:19:33.148120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710658, status# StatusAccepted 2025-09-25T16:19:33.148126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046678944 2025-09-25T16:19:33.148132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 113 tablet: 72057594046678944 2025-09-25T16:19:33.148140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=2, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:33.148172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.148178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710658:0 ProgressState, at schemeshard: 72057594046678944 
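
[Editorial aside, not part of the captured log.] The [IncrementalRestore] records above repeatedly log "Checking completion: InProgressOperations.size()=2, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2" while the two per-table restore operations (281474976710657:0 and 281474976710658:0) are still outstanding. The following is a toy model of that bookkeeping only, not YDB's actual implementation (class and method names are invented for the sketch, and shard tracking is omitted); it assumes an incremental step advances once every tracked operation has completed:

from dataclasses import dataclass, field

@dataclass
class IncrementalRestoreProgress:
    """Toy tracker mirroring the counters printed in the log above."""
    incremental_backups: int                         # IncrementalBackups.size()
    current_idx: int = 0                             # CurrentIncrementalIdx
    in_progress: set = field(default_factory=set)    # InProgressOperations
    completed: set = field(default_factory=set)      # CompletedOperations

    def track(self, op_id: str) -> None:
        self.in_progress.add(op_id)

    def mark_done(self, op_id: str) -> None:
        if op_id in self.in_progress:
            self.completed.add(op_id)

    def check_completion(self) -> bool:
        """Advance to the next incremental backup once all tracked operations finished."""
        print(f"Checking completion: InProgressOperations.size()={len(self.in_progress)}, "
              f"CompletedOperations.size()={len(self.completed)}, "
              f"CurrentIncrementalIdx={self.current_idx}, "
              f"IncrementalBackups.size()={self.incremental_backups}")
        if self.in_progress and self.completed >= self.in_progress:
            self.current_idx += 1
            self.in_progress.clear()
            self.completed.clear()
        return self.current_idx >= self.incremental_backups

# The two operations tracked for restore 113 in the log above:
progress = IncrementalRestoreProgress(incremental_backups=2)
progress.track("281474976710657:0")
progress.track("281474976710658:0")
progress.check_completion()           # still 2 in progress, 0 completed
progress.mark_done("281474976710657:0")
progress.mark_done("281474976710658:0")
progress.check_completion()           # advances CurrentIncrementalIdx to 1
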
2025-09-25T16:19:33.148507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 113 2025-09-25T16:19:33.148929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269549568 2025-09-25T16:19:33.148959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 0, tablet: 72075186233409548 2025-09-25T16:19:33.148989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 113 2025-09-25T16:19:33.149121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710658:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269549568 2025-09-25T16:19:33.149132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 0, tablet: 72075186233409549 TestWaitNotification: OK eventTxId 113 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_Select [GOOD] Test command err: Trying to start YDB, gRPC: 17352, MsgBus: 12748 2025-09-25T16:19:32.517775Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062235700060377:2068];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.517800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004317/r3tmp/tmpzI45yw/pdisk_1.dat 2025-09-25T16:19:32.560857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.574074Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17352, node 1 2025-09-25T16:19:32.587396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.587409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.587412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.587468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12748 2025-09-25T16:19:32.621620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.621657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.622677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12748 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.649929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.788070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.886150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062235700061019:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062235700061011:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062235700061026:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.889038Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062235700061025:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-09-25T16:19:32.954713Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062235700061078:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TCmsTest::StateStorageTwoBrokenRings >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpBasic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.556238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.556261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.556265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.556269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.556274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.556277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.556285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.556296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.556383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.556431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.569906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.569932Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.573727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2025-09-25T16:19:32.573811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.573835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.575201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.575256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.575334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.575387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.575736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.575770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.575980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.575987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.576006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.576014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.576020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.576052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.577045Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.593124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.593201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.593254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.593263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 
72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.593314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.593325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.597693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.597769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.597843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.597869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.597876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.597883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.598723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.598744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.598752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.599433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.599448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.599456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.599464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.600220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.600843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.600892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.601150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.601187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.601197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.601284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.601293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.601331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.601344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.601971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.601982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ion IsReadyToNotify, TxId: 112, ready parts: 5/5, is published: true 2025-09-25T16:19:33.000234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:310:2300] message: TxId: 112 2025-09-25T16:19:33.000238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 5/5 2025-09-25T16:19:33.000244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 112:0 2025-09-25T16:19:33.000247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 112:0 2025-09-25T16:19:33.000270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2025-09-25T16:19:33.000274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:33.000277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 112:1 2025-09-25T16:19:33.000279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 112:1 2025-09-25T16:19:33.000283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-09-25T16:19:33.000286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 112:2 2025-09-25T16:19:33.000288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 112:2 2025-09-25T16:19:33.000291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:19:33.000294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 112:3 2025-09-25T16:19:33.000296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 112:3 2025-09-25T16:19:33.000299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-09-25T16:19:33.000301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 112:4 2025-09-25T16:19:33.000303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 112:4 2025-09-25T16:19:33.000307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-09-25T16:19:33.001308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-09-25T16:19:33.001325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:953:2877] 2025-09-25T16:19:33.001342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:366: [IncrementalRestore] Handle(TEvRunIncrementalRestore) 
starting sequential processing for 3 incremental backups backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] operationId: 112:4 tablet: 72057594046678944 2025-09-25T16:19:33.001354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_002' 2025-09-25T16:19:33.001358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_003' 2025-09-25T16:19:33.001384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_004' 2025-09-25T16:19:33.001388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:394: [IncrementalRestore] Handle(TEvRunIncrementalRestore) state now has 3 incremental backups 2025-09-25T16:19:33.001398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 112 tablet: 72057594046678944 2025-09-25T16:19:33.001415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=3 2025-09-25T16:19:33.001419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:33.001440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:193: [IncrementalRestore] Processing incremental backup #1 path: backup_002 timestamp: 0 2025-09-25T16:19:33.001458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:520: [IncrementalRestore] CreateIncrementalRestoreOperation for backup: backup_002 operationId: 112 backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:19:33.001503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/TestCollection/backup_002_incremental/TestTable -> /MyRoot/TestTable 2025-09-25T16:19:33.001521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710657:0 expects 1 shards 2025-09-25T16:19:33.001526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710657:0 for incremental restore 112 2025-09-25T16:19:33.001530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/TestTable 2025-09-25T16:19:33.001536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-09-25T16:19:33.002399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/TestCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/TestCollection/backup_002_incremental/TestTable" 
DstTablePath: "/MyRoot/TestTable" } } TxId: 281474976710657 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.002437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710657:0, tx# WorkingDir: "/MyRoot/.backups/collections/TestCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/TestCollection/backup_002_incremental/TestTable" DstTablePath: "/MyRoot/TestTable" } 2025-09-25T16:19:33.002478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710657:0, srcs# [/MyRoot/.backups/collections/TestCollection/backup_002_incremental/TestTable], dst# /MyRoot/TestTable 2025-09-25T16:19:33.002509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2025-09-25T16:19:33.002514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-09-25T16:19:33.002521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710657:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 8] source path: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-09-25T16:19:33.002527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710657:0 workingDir# /MyRoot/.backups/collections/TestCollection dstTablePath# /MyRoot/TestTable pathId# [OwnerId: 72057594046678944, LocalPathId: 13] 2025-09-25T16:19:33.002539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.003305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 112 2025-09-25T16:19:33.003367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.003417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/TestCollection/backup_002_incremental/TestTable, dst path: /MyRoot/TestTable 2025-09-25T16:19:33.003452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710657, status# StatusAccepted 2025-09-25T16:19:33.003460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944 2025-09-25T16:19:33.003469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 112 tablet: 72057594046678944 2025-09-25T16:19:33.003484Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=1, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=3 2025-09-25T16:19:33.003527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.003534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710657:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:33.004240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 112 2025-09-25T16:19:33.004271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-09-25T16:19:33.004297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 0, tablet: 72075186233409547 TestWaitNotification: OK eventTxId 112 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Filter [GOOD] Test command err: Trying to start YDB, gRPC: 20432, MsgBus: 31835 2025-09-25T16:19:32.402233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062237162854628:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.402477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004372/r3tmp/tmpZ9nTTm/pdisk_1.dat 2025-09-25T16:19:32.463248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.475043Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20432, node 1 2025-09-25T16:19:32.493047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.493061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.493064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.493112Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:19:32.505185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.505223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.506283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31835 TClient is connected to server 
localhost:31835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.556865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.561193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.733668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.897109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237162855236:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.897154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.900971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237162855246:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.901003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.943568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.965248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237162855341:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.965287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.965310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237162855346:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.965326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237162855348:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.965336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.966117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.968099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062237162855350:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-09-25T16:19:33.052196Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062241457822697:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TIncrementalRestoreTests::LongIncrementalRestoreOpCleanupMultipleOperations >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpOperationAlreadyInProgress >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController >> TOlap::CreateStore |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::TxProgressExecutionWithCorrectBackupCollectionPathId [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.628270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.628293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.628299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.628305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.628311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.628316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.628325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.628340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.628447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.628499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.640109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.640130Z node 1 
:IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.643700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.643796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.643824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.645639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.645704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.645809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.645880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.646347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.646388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.646669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.646679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.646702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.646709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.646715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.646748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.648457Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.673188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.673276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.673333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 0 2025-09-25T16:19:32.673341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.673408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.673423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.674016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.674080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.674130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.674152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.674158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.674164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.674688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.674706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.674715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.679502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.679518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.679525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.679531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.680155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-09-25T16:19:32.680741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.680778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.681001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.681030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.681039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.681099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.681106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.681133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.681142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.681563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.681570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ection.cpp:98: [72057594046678944] TDoneWithIncrementalRestore, operationId: 117:3 Found 2 incremental backups to restore 2025-09-25T16:19:33.278822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#117:3 progress is 4/4 2025-09-25T16:19:33.278825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 117 ready parts: 4/4 2025-09-25T16:19:33.278831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 117, ready parts: 4/4, is published: true 2025-09-25T16:19:33.278846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:310:2300] message: TxId: 117 2025-09-25T16:19:33.278851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 117 ready parts: 4/4 2025-09-25T16:19:33.278858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 117:0 2025-09-25T16:19:33.278863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 117:0 2025-09-25T16:19:33.278895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 3 2025-09-25T16:19:33.278901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:33.278908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 117:1 2025-09-25T16:19:33.278912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 117:1 2025-09-25T16:19:33.278917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-09-25T16:19:33.278922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 117:2 2025-09-25T16:19:33.278925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 117:2 2025-09-25T16:19:33.278930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:19:33.278934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 117:3 2025-09-25T16:19:33.278938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 117:3 2025-09-25T16:19:33.278943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:33.279666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-09-25T16:19:33.279678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:1223:3125] 2025-09-25T16:19:33.279699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:366: [IncrementalRestore] 
Handle(TEvRunIncrementalRestore) starting sequential processing for 2 incremental backups backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] operationId: 117:3 tablet: 72057594046678944 2025-09-25T16:19:33.279707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_002' 2025-09-25T16:19:33.279711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_003' 2025-09-25T16:19:33.279715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:394: [IncrementalRestore] Handle(TEvRunIncrementalRestore) state now has 2 incremental backups 2025-09-25T16:19:33.279725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 117 tablet: 72057594046678944 2025-09-25T16:19:33.279741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:33.279745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:33.279750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:193: [IncrementalRestore] Processing incremental backup #1 path: backup_002 timestamp: 0 2025-09-25T16:19:33.279755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:520: [IncrementalRestore] CreateIncrementalRestoreOperation for backup: backup_002 operationId: 117 backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:19:33.279787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/TargetCollection/backup_002_incremental/TargetTable -> /MyRoot/TargetTable 2025-09-25T16:19:33.279804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710657:0 expects 1 shards 2025-09-25T16:19:33.279809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710657:0 for incremental restore 117 2025-09-25T16:19:33.279813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/TargetTable 2025-09-25T16:19:33.279818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-09-25T16:19:33.280474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/TargetCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/TargetCollection/backup_002_incremental/TargetTable" DstTablePath: "/MyRoot/TargetTable" } } TxId: 281474976710657 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.280523Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710657:0, tx# WorkingDir: "/MyRoot/.backups/collections/TargetCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/TargetCollection/backup_002_incremental/TargetTable" DstTablePath: "/MyRoot/TargetTable" } 2025-09-25T16:19:33.280559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710657:0, srcs# [/MyRoot/.backups/collections/TargetCollection/backup_002_incremental/TargetTable], dst# /MyRoot/TargetTable 2025-09-25T16:19:33.280584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2025-09-25T16:19:33.280587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 2 2025-09-25T16:19:33.280591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710657:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 8] source path: [OwnerId: 72057594046678944, LocalPathId: 18] 2025-09-25T16:19:33.280595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710657:0 workingDir# /MyRoot/.backups/collections/TargetCollection dstTablePath# /MyRoot/TargetTable pathId# [OwnerId: 72057594046678944, LocalPathId: 18] 2025-09-25T16:19:33.280603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.281596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 117 2025-09-25T16:19:33.281884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.281921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/TargetCollection/backup_002_incremental/TargetTable, dst path: /MyRoot/TargetTable 2025-09-25T16:19:33.281947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710657, status# StatusAccepted 2025-09-25T16:19:33.281952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944 2025-09-25T16:19:33.281959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 117 tablet: 72057594046678944 2025-09-25T16:19:33.281970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=1, 
CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:33.282001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.282005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710657:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:33.282513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 117 2025-09-25T16:19:33.282571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-09-25T16:19:33.282591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 0, tablet: 72075186233409547 TestWaitNotification: OK eventTxId 117 Successfully verified TEvRunIncrementalRestore event contains valid PathId: [OwnerId: 72057594046678944, LocalPathId: 4] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpInternalStateVerification [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.696622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.696649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.696655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.696660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.696667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.696672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.696681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.696696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
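The [IncrementalRestore] entries above (Handle(TEvRunIncrementalRestore), TTxProgressIncrementalRestore::Execute, and the "Checking completion: InProgressOperations.size()=..., CompletedOperations.size()=..., CurrentIncrementalIdx=..." messages) trace a sequential flow: incremental backups are applied one at a time, and the next one is only started once no per-table restore operations remain in flight. The sketch below is a minimal, standalone illustration of that progress check, added for readability; it is not YDB's schemeshard code, and all type and member names are hypothetical, chosen only to mirror the counters printed in the log.

```cpp
#include <cstddef>
#include <cstdio>
#include <string>
#include <unordered_set>
#include <vector>

// Hypothetical state mirroring the counters printed in the log:
// InProgressOperations.size(), CompletedOperations.size(),
// CurrentIncrementalIdx, IncrementalBackups.size().
struct TIncrementalRestoreState {
    std::vector<std::string> IncrementalBackups;        // e.g. {"backup_002", "backup_003"}
    std::unordered_set<unsigned> InProgressOperations;  // per-table restore operation ids
    std::unordered_set<unsigned> CompletedOperations;
    std::size_t CurrentIncrementalIdx = 0;
};

// Sketch of the "Checking completion" step: if nothing is in flight,
// either start the next incremental backup or report the restore as done.
// Returns true once all incremental backups have been processed.
bool ProgressIncrementalRestore(TIncrementalRestoreState& state) {
    std::printf("Checking completion: InProgress=%zu Completed=%zu Idx=%zu Total=%zu\n",
                state.InProgressOperations.size(), state.CompletedOperations.size(),
                state.CurrentIncrementalIdx, state.IncrementalBackups.size());

    if (!state.InProgressOperations.empty()) {
        return false;  // wait for the tracked per-table operations to finish first
    }
    if (state.CurrentIncrementalIdx >= state.IncrementalBackups.size()) {
        return true;   // all incremental backups applied
    }

    // "No operations in progress, starting first/next incremental backup":
    // the real flow would propose a restore transaction per target table here
    // and track its operation id; we insert a placeholder id instead.
    std::printf("Processing incremental backup #%zu path: %s\n",
                state.CurrentIncrementalIdx + 1,
                state.IncrementalBackups[state.CurrentIncrementalIdx].c_str());
    state.InProgressOperations.insert(static_cast<unsigned>(state.CurrentIncrementalIdx) + 1);
    ++state.CurrentIncrementalIdx;
    return false;
}
```

In this reading, when a tracked operation finishes it would move from InProgressOperations to CompletedOperations and the check runs again, which matches the repeated Execute/Complete pairs logged for operationId 117 above (first with InProgressOperations.size()=0, then with InProgressOperations.size()=1 after the 281474976710657:0 restore operation is registered).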
2025-09-25T16:19:32.696815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.696902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.713562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.713584Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.717645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.717732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.717760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.719673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.719733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.719855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.719918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.720441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.720470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.720682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.720692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.720712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.720720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.720727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.720753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.722106Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.738517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.738599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.738659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.738669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.738728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.738743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.739455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.739515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.739576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.739595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.739601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.739607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.740042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.740054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.740060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.740370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.740377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.740381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.740386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.740992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.741474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.741520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.741732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.741759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.741765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.741849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.741857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.741885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.741897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.742418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.742442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ngth: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 12 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Source table (backup_004_incremental) state: EPathStateAwaitingOutgoingIncrementalRestore 2025-09-25T16:19:33.312061Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/DatabaseTestCollection/backup_005_incremental/DatabaseTestTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.312072Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/DatabaseTestCollection/backup_005_incremental/DatabaseTestTable" took 11us result status StatusSuccess 2025-09-25T16:19:33.312107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/DatabaseTestCollection/backup_005_incremental/DatabaseTestTable" PathDescription { Self { Name: "DatabaseTestTable" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 113 CreateStep: 5000014 ParentPathId: 13 PathState: EPathStateAwaitingOutgoingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "DatabaseTestTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 16 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 14 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Source table (backup_005_incremental) state: EPathStateAwaitingOutgoingIncrementalRestore 2025-09-25T16:19:33.312159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/DatabaseTestCollection/backup_006_incremental/DatabaseTestTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.312176Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/DatabaseTestCollection/backup_006_incremental/DatabaseTestTable" took 17us result status StatusSuccess 2025-09-25T16:19:33.312235Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/DatabaseTestCollection/backup_006_incremental/DatabaseTestTable" PathDescription { Self { Name: "DatabaseTestTable" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 115 CreateStep: 5000016 ParentPathId: 15 PathState: EPathStateAwaitingOutgoingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "DatabaseTestTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 16 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 
UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 16 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Source table (backup_006_incremental) state: EPathStateAwaitingOutgoingIncrementalRestore 2025-09-25T16:19:33.312303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/DatabaseTestCollection" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.312322Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/DatabaseTestCollection" took 21us result status StatusSuccess 2025-09-25T16:19:33.312389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/DatabaseTestCollection" PathDescription { Self { Name: "DatabaseTestCollection" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "backup_001_full" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "backup_002_incremental" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 106 CreateStep: 5000007 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "backup_003_incremental" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "backup_004_incremental" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 110 CreateStep: 5000011 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "backup_005_incremental" PathId: 13 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 112 CreateStep: 5000013 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "backup_006_incremental" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 114 CreateStep: 5000015 ParentPathId: 4 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 16 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } BackupCollectionDescription { Name: "DatabaseTestCollection" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/DatabaseTestTable" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Backup collection state: EPathStateNoChanges ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 20972, MsgBus: 16428 2025-09-25T16:19:32.605335Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062237042879921:2065];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.605744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004309/r3tmp/tmphUMYnG/pdisk_1.dat 2025-09-25T16:19:32.652902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.665057Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20972, node 1 2025-09-25T16:19:32.678239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.678266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.678273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.678323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16428 2025-09-25T16:19:32.709093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.709127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.710172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to 
server localhost:16428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.741795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.916183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:33.007003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062241337847858:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.007032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.007103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062241337847868:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.007117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.060153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:33.082368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:33.091489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062241337848032:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.091513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.091517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062241337848037:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.091544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062241337848039:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.091551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.092081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:33.100252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062241337848040:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-09-25T16:19:33.201244Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062241337848092:2443] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::IncrementalRestoreCompleteLifecycle [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.752020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.752042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.752047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.752053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.752058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.752062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.752071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.752084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.752195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.752243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.767924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.767945Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
2025-09-25T16:19:32.771847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.771934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.771959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.773776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.773828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.773908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.773958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.774349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.774383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.774587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.774594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.774613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.774618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.774624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.774648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.775770Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.791825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.791892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.791941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.791948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.791996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.792008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.792921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.792991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.793053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.793071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.793075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.793078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.793518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.793528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.793532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.793932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.793968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.793974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.793981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.794704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.802837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.802906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.803205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.803244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.803255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.803337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.803347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.803382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.803396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.804011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.804021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
d: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 14 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.565013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/LifecycleCollection/backup_004_incremental/LifecycleTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.565032Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/LifecycleCollection/backup_004_incremental/LifecycleTable" took 21us result status StatusSuccess 2025-09-25T16:19:33.565107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/LifecycleCollection/backup_004_incremental/LifecycleTable" PathDescription { Self { Name: "LifecycleTable" PathId: 12 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 111 CreateStep: 5000012 ParentPathId: 11 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "LifecycleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: 
false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 14 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 12 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.565199Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/LifecycleCollection/backup_005_incremental/LifecycleTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.565217Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/LifecycleCollection/backup_005_incremental/LifecycleTable" took 20us result status StatusSuccess 2025-09-25T16:19:33.565292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/LifecycleCollection/backup_005_incremental/LifecycleTable" PathDescription { Self { Name: "LifecycleTable" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 113 CreateStep: 5000014 ParentPathId: 13 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "LifecycleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" 
KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 14 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 14 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.608923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/LifecycleTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:33.608985Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/LifecycleTable" took 76us result status StatusSuccess 2025-09-25T16:19:33.609111Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/LifecycleTable" PathDescription { Self { Name: "LifecycleTable" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 114 CreateStep: 5000015 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "LifecycleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 14 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 15 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.884215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.884239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.884246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.884251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.884258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.884263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.884275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.884289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.884416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.884481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.902340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.902362Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.907089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.907204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.907237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.909486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.909559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.909670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.909736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.910245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.910287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.910565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.910579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.910603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.910613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.910620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.910653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.913726Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.936360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.936447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.936506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.936515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.936578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.936595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.937285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.937355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.937431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.937455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.937462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.937468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.937964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.937977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.937988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.938383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.938394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.938401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 
1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.938409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.939280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.939727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.939786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.940015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.940046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.940055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.940132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.940141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.940176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.940189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.940711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.940721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
eneration: 2 2025-09-25T16:19:33.420728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:19:33.420738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 693 RawX2: 4294969944 } Origin: 72075186233409548 State: 2 TxId: 281474976710658 Step: 0 Generation: 2 2025-09-25T16:19:33.420755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710658:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.420761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.420767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710658:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:33.420779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710658:0 129 -> 240 2025-09-25T16:19:33.421459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.421569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.421580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:251: [72057594046678944] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976710658:0 ProgressState 2025-09-25T16:19:33.421590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710658:0 progress is 1/1 2025-09-25T16:19:33.421596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:33.421602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710658:0 progress is 1/1 2025-09-25T16:19:33.421606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:33.421612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 1/1, is published: true 2025-09-25T16:19:33.421619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:33.421628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710658:0 2025-09-25T16:19:33.421633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710658:0 2025-09-25T16:19:33.421664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:19:33.421671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-09-25T16:19:33.421680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:608: [IncrementalRestore] Operation 281474976710658:0 completed, triggering progress check for incremental restore 110 2025-09-25T16:19:33.421777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:33.421786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:33.421800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:171: [IncrementalRestore] Operation 281474976710658:0 completed for incremental restore 110 2025-09-25T16:19:33.421811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:66: [IncrementalRestore] Persisted CompletedOperations update: @ 2025-09-25T16:19:33.421817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=1, CurrentIncrementalIdx=1, IncrementalBackups.size()=2 2025-09-25T16:19:33.421822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:76: [IncrementalRestore] All operations for current incremental backup completed, moving to next 2025-09-25T16:19:33.421833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:86: [IncrementalRestore] After MoveToNextIncremental: CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-09-25T16:19:33.421838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:89: [IncrementalRestore] All incremental backups processed, performing finalization 2025-09-25T16:19:33.421843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:210: [IncrementalRestore] Starting finalization of incremental restore operation: 110 2025-09-25T16:19:33.421864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:234: [IncrementalRestore] Sending finalization operation with txId: 281474976710659 2025-09-25T16:19:33.422742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpIncrementalRestoreFinalize Internal: true IncrementalRestoreFinalize { OriginalOperationId: 110 BackupCollectionPathId: 4 TargetTablePaths: "/MyRoot/PersistTable" } } TxId: 281474976710659 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.422780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:209: [72057594046678944] TIncrementalRestoreFinalizeOp Propose, opId: 281474976710659:0 2025-09-25T16:19:33.422793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:33.422801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710659:0 type: 
TxIncrementalRestoreFinalize target path: [OwnerId: 72057594046678944, LocalPathId: 4] source path: 2025-09-25T16:19:33.422813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.423827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 110 2025-09-25T16:19:33.423865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710659, response: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.423903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: RESTORE INCREMENTAL FINALIZE, no path 2025-09-25T16:19:33.423938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710659, status# StatusAccepted 2025-09-25T16:19:33.423946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944 2025-09-25T16:19:33.423954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7141: no able to determine destination for message TEvModifySchemeTransactionResult: txId: 281474976710659, at schemeshard: 72057594046678944 2025-09-25T16:19:33.423969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.423978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:56: [72057594046678944] TIncrementalRestoreFinalize TPropose operationId: 281474976710659:0 ProgressState 2025-09-25T16:19:33.423993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:111: [72057594046678944] Adding target table path to normalize: /MyRoot/PersistTable 2025-09-25T16:19:33.424002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:144: [72057594046678944] Marked incremental restore state as completed for operation: 110 2025-09-25T16:19:33.424008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:147: [72057594046678944] Keeping IncrementalRestoreOperations entry for operation: 110 - will be cleaned up on FORGET 2025-09-25T16:19:33.424014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:150: [72057594046678944] Cleaned up long incremental restore ops for operation: 110 2025-09-25T16:19:33.424020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:176: [72057594046678944] Cleaned up mappings for operation: 110 2025-09-25T16:19:33.424026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:82: [72057594046678944] Cleaning up 1 shard progress entries for operation 110 2025-09-25T16:19:33.424048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-09-25T16:19:33.424053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 
2025-09-25T16:19:33.424059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-09-25T16:19:33.424063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-09-25T16:19:33.424069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2025-09-25T16:19:33.424076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-09-25T16:19:33.424082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710659:0 2025-09-25T16:19:33.424086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710659:0 2025-09-25T16:19:33.424098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:33.424492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710659:0 >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiGetListForget [GOOD] >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpOperationAlreadyInProgress [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents [GOOD] >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TableCreation::SimpleUpdateTable [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TableCreation::ConcurrentUpdateTable [GOOD] >> TableCreation::CreateOldTable >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiGetListForget [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: 
[1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:33.733723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:33.733744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:33.733748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:33.733751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:33.733756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:33.733759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:33.733765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:33.733776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:33.733850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:33.733891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:33.744654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:33.744676Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:33.747907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:33.747973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:33.747997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:33.749487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:33.749536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:33.749623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.749665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:33.750014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:33.750039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:33.750211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:33.750218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:33.750232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:33.750237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:33.750242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:33.750262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.751264Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:33.765828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.765899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.765946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:33.765952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:33.766004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.766019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:33.766653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.766694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:33.766736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.766749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:33.766753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:33.766757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:33.767073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.767084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:33.767092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:33.767410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.767418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.767424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:33.767431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:33.767875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:33.768190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:33.768232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:33.768391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.768408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.768413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-09-25T16:19:33.768460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:33.768466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:33.768487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:33.768496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:33.768879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:33.768885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... tep: 0 Generation: 2 2025-09-25T16:19:34.165305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:19:34.165329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 693 RawX2: 4294969944 } Origin: 72075186233409548 State: 2 TxId: 281474976710658 Step: 0 Generation: 2 2025-09-25T16:19:34.165346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710658:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.165351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.165357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710658:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:34.165364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710658:0 129 -> 240 2025-09-25T16:19:34.165868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.165916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.165924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:251: [72057594046678944] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976710658:0 ProgressState 2025-09-25T16:19:34.165934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710658:0 progress is 1/1 2025-09-25T16:19:34.165940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: 
TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:34.165945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710658:0 progress is 1/1 2025-09-25T16:19:34.165952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:34.165958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 1/1, is published: true 2025-09-25T16:19:34.165965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-09-25T16:19:34.165971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710658:0 2025-09-25T16:19:34.165977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710658:0 2025-09-25T16:19:34.166009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:19:34.166015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-09-25T16:19:34.166022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:608: [IncrementalRestore] Operation 281474976710658:0 completed, triggering progress check for incremental restore 110 2025-09-25T16:19:34.166122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:34.166130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:34.166142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:171: [IncrementalRestore] Operation 281474976710658:0 completed for incremental restore 110 2025-09-25T16:19:34.166152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:66: [IncrementalRestore] Persisted CompletedOperations update: @ 2025-09-25T16:19:34.166158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=1, CurrentIncrementalIdx=1, IncrementalBackups.size()=2 2025-09-25T16:19:34.166162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:76: [IncrementalRestore] All operations for current incremental backup completed, moving to next 2025-09-25T16:19:34.166169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:86: [IncrementalRestore] After MoveToNextIncremental: CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-09-25T16:19:34.166173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:89: [IncrementalRestore] All incremental backups processed, performing finalization 2025-09-25T16:19:34.166178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:210: [IncrementalRestore] Starting finalization of incremental restore operation: 110 2025-09-25T16:19:34.166194Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:234: [IncrementalRestore] Sending finalization operation with txId: 281474976710659 2025-09-25T16:19:34.166923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpIncrementalRestoreFinalize Internal: true IncrementalRestoreFinalize { OriginalOperationId: 110 BackupCollectionPathId: 4 TargetTablePaths: "/MyRoot/ApiTable" } } TxId: 281474976710659 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.166955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:209: [72057594046678944] TIncrementalRestoreFinalizeOp Propose, opId: 281474976710659:0 2025-09-25T16:19:34.166968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:34.166975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710659:0 type: TxIncrementalRestoreFinalize target path: [OwnerId: 72057594046678944, LocalPathId: 4] source path: 2025-09-25T16:19:34.166985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.167763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 110 2025-09-25T16:19:34.167796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710659, response: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.167826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: RESTORE INCREMENTAL FINALIZE, no path 2025-09-25T16:19:34.167857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710659, status# StatusAccepted 2025-09-25T16:19:34.167866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944 2025-09-25T16:19:34.167873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7141: no able to determine destination for message TEvModifySchemeTransactionResult: txId: 281474976710659, at schemeshard: 72057594046678944 2025-09-25T16:19:34.167886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.167893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:56: [72057594046678944] TIncrementalRestoreFinalize TPropose operationId: 281474976710659:0 ProgressState 2025-09-25T16:19:34.167905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:111: [72057594046678944] Adding target table path to normalize: /MyRoot/ApiTable 2025-09-25T16:19:34.167912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:144: [72057594046678944] Marked incremental restore state as completed for operation: 110 2025-09-25T16:19:34.167917Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_incremental_restore_finalize.cpp:147: [72057594046678944] Keeping IncrementalRestoreOperations entry for operation: 110 - will be cleaned up on FORGET 2025-09-25T16:19:34.167922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:150: [72057594046678944] Cleaned up long incremental restore ops for operation: 110 2025-09-25T16:19:34.167928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:176: [72057594046678944] Cleaned up mappings for operation: 110 2025-09-25T16:19:34.167933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:82: [72057594046678944] Cleaning up 1 shard progress entries for operation 110 2025-09-25T16:19:34.167951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-09-25T16:19:34.167956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-09-25T16:19:34.167961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-09-25T16:19:34.167967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-09-25T16:19:34.167972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2025-09-25T16:19:34.167978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-09-25T16:19:34.167983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710659:0 2025-09-25T16:19:34.167987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710659:0 2025-09-25T16:19:34.167996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:34.168367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710659:0 >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TTopicReaderTests::TestRun_ReadOneMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CreateLongIncrementalRestoreOpOperationAlreadyInProgress [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:34.174798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:34.174818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.174822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:34.174826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:34.174830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:34.174833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:34.174840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.174851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:34.174929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:34.174999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:34.185712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:34.185735Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.189623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:34.189726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:34.189760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:34.191458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:34.191517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:34.191637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.191706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:34.192177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.192214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:34.192482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-09-25T16:19:34.192493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.192535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:34.192544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:34.192552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:34.192584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.193986Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.209193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.209274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.209336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:34.209345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:34.209413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.209427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.210123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.210168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:34.210212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.210225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-09-25T16:19:34.210229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.210233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:34.210577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.210586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.210594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:34.211026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.211042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.211049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.211057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:34.211529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.211920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:34.211951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:34.212123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.212142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.212148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.212202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:34.212208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.212230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:34.212238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:34.212622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.212628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... LAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 4/4 2025-09-25T16:19:34.524962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 4/4 2025-09-25T16:19:34.524966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 4/4 2025-09-25T16:19:34.524971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 3/4, is published: true 2025-09-25T16:19:34.524974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 4/4, is published: true 2025-09-25T16:19:34.524987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:310:2300] message: TxId: 110 2025-09-25T16:19:34.524992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 4/4 2025-09-25T16:19:34.525000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 110:0 2025-09-25T16:19:34.525004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 110:0 2025-09-25T16:19:34.525030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-09-25T16:19:34.525034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:34.525040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 110:1 2025-09-25T16:19:34.525043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 110:1 2025-09-25T16:19:34.525049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-09-25T16:19:34.525053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 110:2 2025-09-25T16:19:34.525057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 110:2 2025-09-25T16:19:34.525061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-09-25T16:19:34.525065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 110:3 2025-09-25T16:19:34.525070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 110:3 2025-09-25T16:19:34.525074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:34.525542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:366: [IncrementalRestore] Handle(TEvRunIncrementalRestore) starting sequential processing for 2 incremental backups backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] operationId: 110:3 tablet: 72057594046678944 2025-09-25T16:19:34.525555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_002' 2025-09-25T16:19:34.525559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_003' 2025-09-25T16:19:34.525563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:394: [IncrementalRestore] Handle(TEvRunIncrementalRestore) state now has 2 incremental backups 2025-09-25T16:19:34.525572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:34.525586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:34.525590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:34.525594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:193: [IncrementalRestore] Processing incremental backup #1 path: backup_002 timestamp: 0 2025-09-25T16:19:34.525600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:520: [IncrementalRestore] CreateIncrementalRestoreOperation for backup: backup_002 operationId: 110 backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:19:34.525627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/BusyCollection/backup_002_incremental/BusyTable -> /MyRoot/BusyTable 2025-09-25T16:19:34.525642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710657:0 expects 1 shards 2025-09-25T16:19:34.525646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710657:0 for incremental restore 110 2025-09-25T16:19:34.525650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/BusyTable 2025-09-25T16:19:34.525656Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-09-25T16:19:34.525681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-09-25T16:19:34.525687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:822:2758] 2025-09-25T16:19:34.526381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/BusyCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/BusyCollection/backup_002_incremental/BusyTable" DstTablePath: "/MyRoot/BusyTable" } } TxId: 281474976710657 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.526415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710657:0, tx# WorkingDir: "/MyRoot/.backups/collections/BusyCollection" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/BusyCollection/backup_002_incremental/BusyTable" DstTablePath: "/MyRoot/BusyTable" } 2025-09-25T16:19:34.526451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710657:0, srcs# [/MyRoot/.backups/collections/BusyCollection/backup_002_incremental/BusyTable], dst# /MyRoot/BusyTable 2025-09-25T16:19:34.526479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2025-09-25T16:19:34.526484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 2 2025-09-25T16:19:34.526490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710657:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 8] source path: [OwnerId: 72057594046678944, LocalPathId: 11] 2025-09-25T16:19:34.526496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710657:0 workingDir# /MyRoot/.backups/collections/BusyCollection dstTablePath# /MyRoot/BusyTable pathId# [OwnerId: 72057594046678944, LocalPathId: 11] 2025-09-25T16:19:34.526506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.527138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 110 2025-09-25T16:19:34.527178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.527232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/BusyCollection/backup_002_incremental/BusyTable, dst path: /MyRoot/BusyTable 2025-09-25T16:19:34.527255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710657, status# StatusAccepted 2025-09-25T16:19:34.527262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046678944 2025-09-25T16:19:34.527270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 110 tablet: 72057594046678944 2025-09-25T16:19:34.527282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=1, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=2 2025-09-25T16:19:34.527329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.527336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710657:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.527856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 110 2025-09-25T16:19:34.527910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-09-25T16:19:34.527935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 0, tablet: 72075186233409547 TestWaitNotification: OK eventTxId 110 |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:33.249358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2025-09-25T16:19:33.249384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:33.249390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:33.249395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:33.249402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:33.249406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:33.249416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:33.249429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:33.249532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:33.249587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:33.265009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:33.265027Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:33.268810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:33.268909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:33.268935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:33.270321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:33.270374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:33.270478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.270531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:33.270946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:33.270980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:33.271234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:33.271244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:33.271266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:33.271274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:33.271281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:33.271312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.272527Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:33.287508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:33.287571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.287612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:33.287617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:33.287672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:33.287684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:33.288277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:33.288351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:33.288368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:33.288372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:33.288654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:33.288931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:33.288943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:33.288949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:33.289406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:33.289719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:33.289757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:33.289921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:33.289938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:33.289944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:33.289991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:33.289996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-09-25T16:19:33.290019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:33.290030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:33.290420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:33.290426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 5-09-25T16:19:34.505622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_restore_backup_collection.cpp:62: [72057594046678944] TDoneWithIncrementalRestore, operationId: 126:2HandleReply TEvCompleteBarrier 2025-09-25T16:19:34.505631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_restore_backup_collection.cpp:98: [72057594046678944] TDoneWithIncrementalRestore, operationId: 126:2 Found 1 incremental backups to restore 2025-09-25T16:19:34.505640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#126:2 progress is 3/3 2025-09-25T16:19:34.505644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-09-25T16:19:34.505648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#126:0 progress is 3/3 2025-09-25T16:19:34.505652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-09-25T16:19:34.505657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 126, ready parts: 2/3, is published: true 2025-09-25T16:19:34.505661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 126, ready parts: 3/3, is published: true 2025-09-25T16:19:34.505680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:310:2300] message: TxId: 126 2025-09-25T16:19:34.505687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-09-25T16:19:34.505694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:0 2025-09-25T16:19:34.505700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 126:0 2025-09-25T16:19:34.505739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 3 2025-09-25T16:19:34.505744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 3 2025-09-25T16:19:34.505752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:1 2025-09-25T16:19:34.505756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 126:1 2025-09-25T16:19:34.505762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 2 2025-09-25T16:19:34.505766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:2 2025-09-25T16:19:34.505770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 126:2 2025-09-25T16:19:34.505775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 3 2025-09-25T16:19:34.506443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-09-25T16:19:34.506455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:2034:3840] 2025-09-25T16:19:34.506473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:366: [IncrementalRestore] Handle(TEvRunIncrementalRestore) starting sequential processing for 1 incremental backups backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 20] operationId: 126:2 tablet: 72057594046678944 2025-09-25T16:19:34.506481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_002' 2025-09-25T16:19:34.506486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:394: [IncrementalRestore] Handle(TEvRunIncrementalRestore) state now has 1 incremental backups 2025-09-25T16:19:34.506496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 126 tablet: 72057594046678944 2025-09-25T16:19:34.506514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=1 2025-09-25T16:19:34.506518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:34.506524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:193: [IncrementalRestore] Processing incremental backup #1 path: backup_002 timestamp: 0 2025-09-25T16:19:34.506530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:520: [IncrementalRestore] CreateIncrementalRestoreOperation for backup: backup_002 operationId: 126 backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 20] 2025-09-25T16:19:34.506564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3 -> /MyRoot/Table3 2025-09-25T16:19:34.506582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710662:0 expects 1 shards 2025-09-25T16:19:34.506586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710662:0 for incremental restore 126 2025-09-25T16:19:34.506590Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/Table3 2025-09-25T16:19:34.506599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-09-25T16:19:34.507344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/Collection3" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3" DstTablePath: "/MyRoot/Table3" } } TxId: 281474976710662 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.507373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:518: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710662:0, tx# WorkingDir: "/MyRoot/.backups/collections/Collection3" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3" DstTablePath: "/MyRoot/Table3" } 2025-09-25T16:19:34.507412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:360: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710662:0, srcs# [/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3], dst# /MyRoot/Table3 2025-09-25T16:19:34.507436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 1 2025-09-25T16:19:34.507439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 2 2025-09-25T16:19:34.507444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710662:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 24] source path: [OwnerId: 72057594046678944, LocalPathId: 27] 2025-09-25T16:19:34.507448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:474: TNewRestoreFromAtTable Propose opId# 281474976710662:0 workingDir# /MyRoot/.backups/collections/Collection3 dstTablePath# /MyRoot/Table3 pathId# [OwnerId: 72057594046678944, LocalPathId: 27] 2025-09-25T16:19:34.507457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710662:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.507904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 126 2025-09-25T16:19:34.508141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710662, response: Status: StatusAccepted TxId: 281474976710662 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.508196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710662, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: 
/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3, dst path: /MyRoot/Table3 2025-09-25T16:19:34.508233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710662, status# StatusAccepted 2025-09-25T16:19:34.508241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710662 SchemeshardId: 72057594046678944 2025-09-25T16:19:34.508252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 126 tablet: 72057594046678944 2025-09-25T16:19:34.508269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=1, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=1 2025-09-25T16:19:34.508315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710662:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.508324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710662:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.509012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 126 2025-09-25T16:19:34.509074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710662:0 from tablet: 72057594046678944 to tablet: 72075186233409554 cookie: 72057594046678944:9 msg type: 269549568 2025-09-25T16:19:34.509097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710662, partId: 0, tablet: 72075186233409554 TestWaitNotification: OK eventTxId 126 Successfully verified 3 TEvRunIncrementalRestore events for 3 unique collections with 3 operations in database |82.0%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpRbo::Bench_10Joins [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-09-25T16:19:32.085843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062234492149057:2141];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.085868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00505b/r3tmp/tmpAlOinF/pdisk_1.dat 2025-09-25T16:19:32.111725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.111753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.112920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.119047Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.119270Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062234492148953:2081] 1758817172084780 != 1758817172084783 TClient is connected to server localhost:8823 TServer::EnableGrpc on GrpcPort 18957, node 1 2025-09-25T16:19:32.151003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.151018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.151021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.151068Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-09-25T16:19:32.193863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.196656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.463618Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.485891Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.485919Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.485929Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.486766Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.486768Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. Creating table 2025-09-25T16:19:32.486792Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.486806Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.486807Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.486810Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.486818Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.486819Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. Creating table 2025-09-25T16:19:32.486821Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.487953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.488354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.488604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.490048Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-09-25T16:19:32.490064Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-09-25T16:19:32.490077Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-09-25T16:19:32.490088Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-09-25T16:19:32.490112Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-09-25T16:19:32.490115Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-09-25T16:19:32.507909Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-09-25T16:19:32.515366Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-09-25T16:19:32.518298Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-09-25T16:19:32.572367Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_executions updater. Column diff is empty, finishing 2025-09-25T16:19:32.615641Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_execution_leases updater. Column diff is empty, finishing 2025-09-25T16:19:32.616572Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table result_sets updater. 
Column diff is empty, finishing 2025-09-25T16:19:32.616782Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:543: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [1:7554062234492149544:2285] ActorId: [1:7554062234492149818:2452] Database: /dc-1 ExecutionId: 36a844e7-5ca80803-327b1ccf-8db7bf3e. Bootstrap. Start TCreateScriptOperationQuery [1:7554062234492149820:2454], RunScriptActorId: [1:7554062234492149819:2453] 2025-09-25T16:19:32.616793Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062234492149818:2452], ActorId: [1:7554062234492149820:2454], TraceId: ExecutionId: 36a844e7-5ca80803-327b1ccf-8db7bf3e, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:32.619206Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256536.932418s seconds to be completed 2025-09-25T16:19:32.619934Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=OGNiZmUwZjItNGI5Zjg5OWYtY2FkYTE3OTAtYjFjZGRlMDc=, workerId: [1:7554062234492149822:2318], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:32.619967Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.620302Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062234492149818:2452], ActorId: [1:7554062234492149820:2454], TraceId: ExecutionId: 36a844e7-5ca80803-327b1ccf-8db7bf3e, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=1&id=OGNiZmUwZjItNGI5Zjg5OWYtY2FkYTE3OTAtYjFjZGRlMDc=, TxId: , text: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $lease_state AS Int32; DECLARE $execution_meta_ttl AS Interval; DECLARE $retry_state AS JsonDocument; DECLARE $user_sid AS Text; DECLARE $user_group_sids AS JsonDocument; DECLARE $parameters AS String; DECLARE $graph_compressed AS Optional; DECLARE $graph_compression_method AS Optional; UPSERT INTO `.metadata/script_executions` ( database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at, retry_state, user_token, user_group_sids, parameters, graph_compressed, graph_compression_method, lease_generation ) VALUES ( $database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl ... [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [2:7554062245598899996:2385], ActorId: [2:7554062245598899997:2386], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: SUCCESS. 
Issues: 2025-09-25T16:19:34.323697Z node 2 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [2:7554062245598899995:2384], ActorId: [2:7554062245598899996:2385], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [2:7554062245598899997:2386] SUCCESS 2025-09-25T16:19:34.323721Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=2&id=YjNlNTYzNmEtZDkyMWQwNTMtM2RjMDdmOGYtNDM5NzY3Y2E=, workerId: [2:7554062245598899999:2388], local sessions count: 1 2025-09-25T16:19:34.323806Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=2&id=NDVlN2I2M2YtZmJkODhjMzUtZGY3YjlmOTktNDg0NWUyYjg=, workerId: [2:7554062245598899772:2328], local sessions count: 0 2025-09-25T16:19:34.413064Z node 2 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2335: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [2:7554062245598900123:2602] ActorId: [2:7554062245598900124:2603] Database: /dc-1 ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434. Bootstrap. Start TGetScriptExecutionOperationQueryActor [2:7554062245598900125:2604] 2025-09-25T16:19:34.413101Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900125:2604], ActorId: [2:7554062245598900126:2605], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:34.413105Z node 2 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900124:2603], ActorId: [2:7554062245598900125:2604], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, Starting query actor #1 [2:7554062245598900126:2605] 2025-09-25T16:19:34.413160Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256535.138465s seconds to be completed 2025-09-25T16:19:34.413581Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, workerId: [2:7554062245598900128:2429], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:34.413613Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:34.413697Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900125:2604], ActorId: [2:7554062245598900126:2605], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM 
`.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:34.413802Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 24, targetId: [2:7554062245598900128:2429] 2025-09-25T16:19:34.413813Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [2:7554062245598900130:2606] 2025-09-25T16:19:34.416017Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 24, sender: [2:7554062245598900129:2430], selfId: [2:7554062241303931743:2196], source: [2:7554062245598900128:2429] 2025-09-25T16:19:34.416128Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900125:2604], ActorId: [2:7554062245598900126:2605], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, TxId: 2025-09-25T16:19:34.416283Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900125:2604], ActorId: [2:7554062245598900126:2605], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, TxId: 2025-09-25T16:19:34.416295Z node 2 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2284: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900125:2604], ActorId: [2:7554062245598900126:2605], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-09-25T16:19:34.416330Z node 2 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [2:7554062245598900124:2603], ActorId: [2:7554062245598900125:2604], TraceId: ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434, RequestDatabase: /dc-1, Got response [2:7554062245598900126:2605] SUCCESS 2025-09-25T16:19:34.416350Z node 2 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2391: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [2:7554062245598900123:2602] ActorId: [2:7554062245598900124:2603] Database: /dc-1 ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434. Extracted script execution operation [2:7554062245598900126:2605], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, RetryRequired: 0, RunScriptActorId: [2:7554062241303932431:2449], LeaseGeneration: 0 2025-09-25T16:19:34.416358Z node 2 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2410: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [2:7554062245598900123:2602] ActorId: [2:7554062245598900124:2603] Database: /dc-1 ExecutionId: b0fbd307-661dfbb1-9d8c8efb-65762434. 
Reply success 2025-09-25T16:19:34.416389Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=2&id=YjMxZGYxYzMtMWM5OGFiZWEtZDM3NjJkNDEtNTQ0YzEwZjQ=, workerId: [2:7554062245598900128:2429], local sessions count: 0 2025-09-25T16:19:34.419602Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k60trgwk68k848va3teqm1nn", Request has 18444985256535.132019s seconds to be completed 2025-09-25T16:19:34.420030Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60trgwk68k848va3teqm1nn", Created new session, sessionId: ydb://session/3?node_id=2&id=ZTUxNTM3YS1jNzg0NTYzYS0yNDYxYmNlYi05YTFlN2E2Mg==, workerId: [2:7554062245598900156:2442], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:34.420056Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k60trgwk68k848va3teqm1nn --------------------------- INIT FINISHED --------------------------- 2025-09-25T16:19:34.420598Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:148: Table test_table updater. Describe result: PathErrorUnknown 2025-09-25T16:19:34.420607Z node 2 :KQP_PROXY NOTICE: table_creator.cpp:168: Table test_table updater. Creating table 2025-09-25T16:19:34.420615Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:101: Table test_table updater. Full table path:/dc-1/test/test_table 2025-09-25T16:19:34.421339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.421662Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-09-25T16:19:34.421673Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976715675 2025-09-25T16:19:34.428741Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-09-25T16:19:34.526911Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:34.527061Z node 2 :KQP_PROXY NOTICE: table_creator.cpp:366: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-09-25T16:19:34.527076Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:101: Table test_table updater. Full table path:/dc-1/test/test_table 2025-09-25T16:19:34.527542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:34.527935Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-09-25T16:19:34.527951Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976715676 2025-09-25T16:19:34.530398Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:34.530987Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: alter. Transaction completed: 281474976715676. Doublechecking... 2025-09-25T16:19:34.624668Z node 2 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:34.627061Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=2&id=ZTUxNTM3YS1jNzg0NTYzYS0yNDYxYmNlYi05YTFlN2E2Mg==, workerId: [2:7554062245598900156:2442], local sessions count: 0 |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> RemoteTopicReader::ReadTopic |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TIncrementalRestoreTests::LongIncrementalRestoreOpCleanupMultipleOperations [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 15751, MsgBus: 13026 2025-09-25T16:19:32.485101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062234968023603:2069];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.485137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/endf/004361/r3tmp/tmpHOK5p9/pdisk_1.dat 2025-09-25T16:19:32.551625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:32.552002Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.560945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062234968023564:2081] 1758817172484352 != 1758817172484355 TServer::EnableGrpc on GrpcPort 15751, node 1 2025-09-25T16:19:32.577017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.577031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.577033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.577078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13026 2025-09-25T16:19:32.596239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.596278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.597330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:32.634556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
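The "Table test_table updater" trace above (create table, then "Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3", then "Doublechecking..." until "Column diff is empty, finishing") corresponds roughly to ordinary YQL DDL. The sketch below is illustrative only: the log names the table path and the columns, but the column types and the choice of primary key here are assumptions.

  -- Illustrative sketch of the updater's two passes; column types and PRIMARY KEY are assumed.
  CREATE TABLE `/dc-1/test/test_table` (
      col1 Utf8,
      col2 Utf8,
      col3 Utf8,
      PRIMARY KEY (col1)
  );

  -- Second pass: the column diff is {col4, col5}, so an ESchemeOpAlterTable is proposed,
  -- after which the updater re-describes the table until the diff is empty.
  ALTER TABLE `/dc-1/test/test_table`
      ADD COLUMN col4 Utf8,
      ADD COLUMN col5 Utf8;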
2025-09-25T16:19:32.639442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.750196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:32.886811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234968024242:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234968024251:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.886989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.930530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.951811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.964579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234968024415:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.964604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234968024420:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.964609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.964669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062234968024423:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.964684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.965454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.974329Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062234968024422:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:19:33.070157Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062239262991771:2444] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 3581, MsgBus: 22788 2025-09-25T16:19:33.381194Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062238062556577:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:33.381534Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004361/r3tmp/tmpLhAS36/pdisk_1.dat 2025-09-25T16:19:33.384094Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:33.396378Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3581, node 2 2025-09-25T16:19:33.406422Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:33.406433Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:33.406435Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:33.406499Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22788 TClient is connected to server localhost:22788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:33.454610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pro ... 
t path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/004361/r3tmp/tmp0GDAm8/pdisk_1.dat 2025-09-25T16:19:33.954228Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:33.963281Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:33.963304Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:33.963874Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:33.965625Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6344, node 3 2025-09-25T16:19:33.971431Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:33.971445Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:33.971447Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:33.971495Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30120 TClient is connected to server localhost:30120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:34.009479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
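The repeated "Resource pool default not found or you don't have access permissions" warnings above appear to come from the first queries racing with the on-demand creation of the default workload-manager pool at .metadata/workload_manager/pools/default; a concurrent create then hits "path exist, request accepts it" and is treated as success. For comparison, a user-defined pool would be declared with YQL along the following lines; the setting names are recalled from YDB's workload-manager feature and are assumptions here, so treat them as placeholders rather than the exact options of the build under test.

  -- Hypothetical example of an explicit resource pool; setting names are assumptions.
  CREATE RESOURCE POOL test_pool WITH (
      CONCURRENT_QUERY_LIMIT = 10,
      QUEUE_SIZE = 100
  );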
2025-09-25T16:19:34.239874Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:34.351912Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554062244920148786:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.351962Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.352180Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554062244920148796:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.352205Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.359834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.368638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.381682Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.383430Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:34.395659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.410285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.423394Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.437309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.451290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.465605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.480053Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:34.495940Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554062244920149518:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.495972Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.495988Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554062244920149523:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.496007Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7554062244920149525:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.496016Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:34.496873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:34.499906Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7554062244920149527:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-09-25T16:19:34.584665Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554062244920149578:2809] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:34.901068Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeSysTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::LongIncrementalRestoreOpCleanupMultipleOperations [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:34.081133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:34.081152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.081156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:34.081160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:34.081165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:34.081168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:34.081174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.081185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:34.081256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:34.081298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:34.092418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:34.092442Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.096234Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:34.096319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:34.096351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:34.097762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:34.097812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:34.097905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.097956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:34.098297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.098330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:34.098520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.098527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.098540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:34.098546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:34.098550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:34.098574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.099814Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.113485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.113558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.113607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:34.113613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for 
txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:34.113660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.113672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.114320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.114362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:34.114402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.114417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:34.114423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.114428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:34.114842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.114861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.114867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:34.115376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.115393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.115400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.115408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:34.116140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.116605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:34.116645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:34.116908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.116940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.116951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.117019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:34.117028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.117056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:34.117067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:34.117574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.117585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710663, ready parts: 1/1, is published: true 2025-09-25T16:19:35.450627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2025-09-25T16:19:35.450634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710663:0 2025-09-25T16:19:35.450639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710663:0 2025-09-25T16:19:35.450672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 2 2025-09-25T16:19:35.450679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-09-25T16:19:35.450687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:608: [IncrementalRestore] Operation 281474976710663:0 completed, triggering progress check for incremental restore 125 2025-09-25T16:19:35.450766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 125 tablet: 72057594046678944 2025-09-25T16:19:35.450775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 125 tablet: 72057594046678944 2025-09-25T16:19:35.450787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:171: [IncrementalRestore] Operation 281474976710663:0 completed for incremental restore 125 2025-09-25T16:19:35.450799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:66: [IncrementalRestore] Persisted CompletedOperations update: @ 2025-09-25T16:19:35.450805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=1, CurrentIncrementalIdx=2, IncrementalBackups.size()=3 2025-09-25T16:19:35.450809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:76: [IncrementalRestore] All operations for current incremental backup completed, moving to next 2025-09-25T16:19:35.450816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:86: [IncrementalRestore] After MoveToNextIncremental: CurrentIncrementalIdx=3, IncrementalBackups.size()=3 2025-09-25T16:19:35.450820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:89: [IncrementalRestore] All incremental backups processed, performing finalization 2025-09-25T16:19:35.450825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:210: [IncrementalRestore] Starting finalization of incremental restore operation: 125 2025-09-25T16:19:35.450846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:234: [IncrementalRestore] Sending finalization operation with txId: 281474976710665 2025-09-25T16:19:35.451601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpIncrementalRestoreFinalize Internal: true IncrementalRestoreFinalize { OriginalOperationId: 125 
BackupCollectionPathId: 11 } } TxId: 281474976710665 , at schemeshard: 72057594046678944 2025-09-25T16:19:35.451641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:209: [72057594046678944] TIncrementalRestoreFinalizeOp Propose, opId: 281474976710665:0 2025-09-25T16:19:35.451653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2025-09-25T16:19:35.451661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710665:0 type: TxIncrementalRestoreFinalize target path: [OwnerId: 72057594046678944, LocalPathId: 11] source path: 2025-09-25T16:19:35.451671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:35.452768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 125 2025-09-25T16:19:35.452854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusAccepted TxId: 281474976710665 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:35.452892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710665, subject: , status: StatusAccepted, operation: RESTORE INCREMENTAL FINALIZE, no path 2025-09-25T16:19:35.452927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710665, status# StatusAccepted 2025-09-25T16:19:35.452936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710665 SchemeshardId: 72057594046678944 2025-09-25T16:19:35.452942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7141: no able to determine destination for message TEvModifySchemeTransactionResult: txId: 281474976710665, at schemeshard: 72057594046678944 2025-09-25T16:19:35.452955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710665:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.452963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:56: [72057594046678944] TIncrementalRestoreFinalize TPropose operationId: 281474976710665:0 ProgressState 2025-09-25T16:19:35.452970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:144: [72057594046678944] Marked incremental restore state as completed for operation: 125 2025-09-25T16:19:35.452974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:147: [72057594046678944] Keeping IncrementalRestoreOperations entry for operation: 125 - will be cleaned up on FORGET 2025-09-25T16:19:35.452979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:150: [72057594046678944] Cleaned up long incremental restore ops for operation: 125 2025-09-25T16:19:35.452985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:176: [72057594046678944] Cleaned up mappings for operation: 125 2025-09-25T16:19:35.452989Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_incremental_restore_finalize.cpp:82: [72057594046678944] Cleaning up 1 shard progress entries for operation 125 2025-09-25T16:19:35.453014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710665:0 progress is 1/1 2025-09-25T16:19:35.453020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-09-25T16:19:35.453025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710665:0 progress is 1/1 2025-09-25T16:19:35.453028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-09-25T16:19:35.453034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: true 2025-09-25T16:19:35.453038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-09-25T16:19:35.453043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710665:0 2025-09-25T16:19:35.453047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710665:0 2025-09-25T16:19:35.453059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 5 2025-09-25T16:19:35.453475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710665:0 2025-09-25T16:19:35.618799Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:35.618892Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table3" took 112us result status StatusSuccess 2025-09-25T16:19:35.619022Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table3" PathDescription { Self { Name: "Table3" PathId: 27 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 126 CreateStep: 5000030 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 26 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 27 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TableCreation::CreateOldTable [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> TAsyncIndexTests::CreateTable >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] Test command err: 2025-09-25T16:19:32.011601Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062235204032119:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.011817Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005078/r3tmp/tmpDX7WL9/pdisk_1.dat 2025-09-25T16:19:32.051574Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:63634 TServer::EnableGrpc on GrpcPort 63494, node 1 2025-09-25T16:19:32.072985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.072997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.072998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.073034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... 
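The DescribeScheme result above reports Table3 with columns key (Uint64) and value (Utf8) and key column "key". A minimal YQL declaration with the same shape is shown below purely for reference; in the test the table is produced through the incremental-restore machinery, not by running this DDL.

  -- Reference-only sketch matching the Table3 schema from the DescribeScheme output above.
  CREATE TABLE `/MyRoot/Table3` (
      key Uint64,
      value Utf8,
      PRIMARY KEY (key)
  );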
TClient::Ls request: dc-1 TClient::Ls response: 2025-09-25T16:19:32.114792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.114824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.115914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:19:32.119287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.418759Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.420531Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.420538Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.420546Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.421295Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.421308Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.421315Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.421333Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.421336Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. Creating table 2025-09-25T16:19:32.421345Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.421353Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.421354Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. Creating table 2025-09-25T16:19:32.421358Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.422277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.422855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.423144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.424625Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-09-25T16:19:32.424635Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-09-25T16:19:32.424657Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-09-25T16:19:32.424659Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-09-25T16:19:32.424668Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-09-25T16:19:32.424671Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-09-25T16:19:32.444675Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-09-25T16:19:32.453003Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-09-25T16:19:32.455749Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-09-25T16:19:32.512632Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_executions updater. Column diff is empty, finishing 2025-09-25T16:19:32.517656Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table result_sets updater. Column diff is empty, finishing 2025-09-25T16:19:32.537586Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_execution_leases updater. 
Column diff is empty, finishing 2025-09-25T16:19:32.537788Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:543: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [1:7554062235204032672:2286] ActorId: [1:7554062235204032946:2453] Database: /dc-1 ExecutionId: 8a11a77a-84137b70-f46c882f-819093ed. Bootstrap. Start TCreateScriptOperationQuery [1:7554062235204032948:2455], RunScriptActorId: [1:7554062235204032947:2454] 2025-09-25T16:19:32.537807Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062235204032946:2453], ActorId: [1:7554062235204032948:2455], TraceId: ExecutionId: 8a11a77a-84137b70-f46c882f-819093ed, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:32.540532Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256537.011094s seconds to be completed 2025-09-25T16:19:32.541308Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=MmMzZjA0MGQtMjMzNmU4ODUtNWUxZjRlYjUtNDYzMTczNzc=, workerId: [1:7554062235204032950:2318], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:32.541346Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.541894Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062235204032946:2453], ActorId: [1:7554062235204032948:2455], TraceId: ExecutionId: 8a11a77a-84137b70-f46c882f-819093ed, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MmMzZjA0MGQtMjMzNmU4ODUtNWUxZjRlYjUtNDYzMTczNzc=, TxId: , text: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $lease_state AS Int32; DECLARE $execution_meta_ttl AS Interval; DECLARE $retry_state AS JsonDocument; DECLARE $user_sid AS Text; DECLARE $user_group_sids AS JsonDocument; DECLARE $parameters AS String; DECLARE $graph_compressed AS Optional; DECLARE $graph_compression_method AS Optional; UPSERT INTO `.metadata/script_executions` ( database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at, retry_state, user_token, user_group_sids, parameters, graph_compressed, graph_compression_method, lease_generation ) VALUES ( $database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl, $retry_state, $user_sid, $user_group_sids, $parameters, $graph_compressed, $graph_compression_method, 1 ); UPSERT INTO `.metadata/script_execution_leases` ( database, execution_id, lease_deadline, lease_generation, expire_at, lease_state ) VALUES ( $database, $execution_id ... 
46644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } 2025-09-25T16:19:35.636976Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554062247865155722:2650] txid# 281474976710680, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } 2025-09-25T16:19:35.637005Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7554062247865155725:2653] txid# 281474976710682, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } 2025-09-25T16:19:35.637107Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710683 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637119Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.637125Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710677 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637128Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.637145Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710681 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637152Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.637243Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710680 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637253Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.637273Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710684 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637277Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.637299Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710682 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:500" SchemeShardTabletId: 72057594046644480 PathId: 10 PathCreateTxId: 281474976710679 } 2025-09-25T16:19:35.637307Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710679 2025-09-25T16:19:35.644579Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644599Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644603Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644608Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644610Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644613Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644615Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644619Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644626Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.644633Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710679. Doublechecking... 2025-09-25T16:19:35.703357Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.703365Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.706324Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:366: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-09-25T16:19:35.706344Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:101: Table test_table updater. 
Full table path:/dc-1/test/test_table 2025-09-25T16:19:35.706807Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:35.706809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:35.707092Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710685 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-09-25T16:19:35.707107Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710685 2025-09-25T16:19:35.709880Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.709933Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: alter. Transaction completed: 281474976710685. Doublechecking... 2025-09-25T16:19:35.725314Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.726828Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.731492Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.733997Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.735502Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.743755Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:35.866131Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. 
Column diff is empty, finishing 2025-09-25T16:19:35.868252Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=OTI4MDY4MTItYzY1ZWM2YWYtNzJmODVhY2ItYWYyYzE3NGQ=, workerId: [3:7554062247865155679:2442], local sessions count: 0 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-09-25T16:19:32.166583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062233519530333:2063];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.166755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00507c/r3tmp/tmpygYY6f/pdisk_1.dat 2025-09-25T16:19:32.200289Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.200934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062233519530310:2081] 1758817172166096 != 1758817172166099 TClient is connected to server localhost:12299 TServer::EnableGrpc on GrpcPort 6859, node 1 2025-09-25T16:19:32.225085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.225104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.225107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.225161Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-09-25T16:19:32.270479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.270514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.271646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-09-25T16:19:32.274130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.276699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.529818Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.549764Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.549779Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.549786Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.550561Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.550564Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. Creating table 2025-09-25T16:19:32.550572Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.550592Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.550593Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. Creating table 2025-09-25T16:19:32.550595Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.550602Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.550602Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.550606Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.551722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.552401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.553489Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-09-25T16:19:32.553500Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-09-25T16:19:32.554413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.554552Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-09-25T16:19:32.554554Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-09-25T16:19:32.554714Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-09-25T16:19:32.554716Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-09-25T16:19:32.581534Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-09-25T16:19:32.588741Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-09-25T16:19:32.596039Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-09-25T16:19:32.651863Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_executions updater. Column diff is empty, finishing 2025-09-25T16:19:32.671814Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table result_sets updater. Column diff is empty, finishing 2025-09-25T16:19:32.692325Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_execution_leases updater. Column diff is empty, finishing 2025-09-25T16:19:32.692499Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:543: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [1:7554062233519530906:2286] ActorId: [1:7554062233519531180:2453] Database: /dc-1 ExecutionId: ebc1ab85-7c747e3d-4527e9d1-66804899. Bootstrap. Start TCreateScriptOperationQuery [1:7554062233519531182:2455], RunScriptActorId: [1:7554062233519531181:2454] 2025-09-25T16:19:32.692509Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062233519531180:2453], ActorId: [1:7554062233519531182:2455], TraceId: ExecutionId: ebc1ab85-7c747e3d-4527e9d1-66804899, RequestDatabase: /dc-1, Bootstrap. 
Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:32.694651Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256536.856976s seconds to be completed 2025-09-25T16:19:32.695500Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=NGE4YzQ0OTAtNTc5NTZhZDMtNzhjOWQ1ZTAtZTJlYjIzOTE=, workerId: [1:7554062233519531184:2318], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:32.695544Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.696039Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062233519531180:2453], ActorId: [1:7554062233519531182:2455], TraceId: ExecutionId: ebc1ab85-7c747e3d-4527e9d1-66804899, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NGE4YzQ0OTAtNTc5NTZhZDMtNzhjOWQ1ZTAtZTJlYjIzOTE=, TxId: , text: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $lease_state AS Int32; DECLARE $execution_meta_ttl AS Interval; DECLARE $retry_state AS JsonDocument; DECLARE $user_sid AS Text; DECLARE $user_group_sids AS JsonDocument; DECLARE $parameters AS String; DECLARE $graph_compressed AS Optional; DECLARE $graph_compression_method AS Optional; UPSERT INTO `.metadata/script_executions` ( database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at, retry_state, user_token, user_group_sids, parameters, graph_compressed, graph_compression_method, lease_generation ) VALUES ( $database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl ... to sender actor, requestId: 20, sender: [3:7554062246329061171:2409], selfId: [3:7554062246329060232:2263], source: [3:7554062246329061111:2388] 2025-09-25T16:19:35.848927Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062246329061108:2385], ActorId: [3:7554062246329061109:2386], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=3&id=ZmYxZjlkNzEtNGJmNjM1ZTEtOGIxZDE3ZTYtNDJiNzFhZDM=, TxId: 2025-09-25T16:19:35.848968Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062246329061108:2385], ActorId: [3:7554062246329061109:2386], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZmYxZjlkNzEtNGJmNjM1ZTEtOGIxZDE3ZTYtNDJiNzFhZDM=, TxId: 2025-09-25T16:19:35.848976Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4060: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062246329061108:2385], ActorId: [3:7554062246329061109:2386], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: SUCCESS. Issues: 2025-09-25T16:19:35.849011Z node 3 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062246329061107:2384], ActorId: [3:7554062246329061108:2385], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [3:7554062246329061109:2386] SUCCESS 2025-09-25T16:19:35.849071Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=ZmYxZjlkNzEtNGJmNjM1ZTEtOGIxZDE3ZTYtNDJiNzFhZDM=, workerId: [3:7554062246329061111:2388], local sessions count: 1 2025-09-25T16:19:35.849211Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjRkODBlMTMtOTBlMzhmYTktMjc5Yzk4ZTEtNWEwYmE0YjM=, workerId: [3:7554062246329060886:2328], local sessions count: 0 2025-09-25T16:19:35.941483Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2335: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7554062246329061234:2601] ActorId: [3:7554062246329061235:2602] Database: /dc-1 ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14. Bootstrap. Start TGetScriptExecutionOperationQueryActor [3:7554062246329061236:2603] 2025-09-25T16:19:35.941501Z node 3 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061235:2602], ActorId: [3:7554062246329061236:2603], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, Starting query actor #1 [3:7554062246329061237:2604] 2025-09-25T16:19:35.941514Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061236:2603], ActorId: [3:7554062246329061237:2604], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, Bootstrap. 
Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:35.941585Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256533.610041s seconds to be completed 2025-09-25T16:19:35.942159Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, workerId: [3:7554062246329061239:2429], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:35.942213Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:35.942346Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061236:2603], ActorId: [3:7554062246329061237:2604], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:35.942499Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [3:7554062246329061239:2429] 2025-09-25T16:19:35.942512Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [3:7554062246329061241:2605] 2025-09-25T16:19:35.944605Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 24, sender: [3:7554062246329061240:2430], selfId: [3:7554062246329060232:2263], source: [3:7554062246329061239:2429] 2025-09-25T16:19:35.944703Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061236:2603], ActorId: [3:7554062246329061237:2604], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, TxId: 2025-09-25T16:19:35.944872Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061236:2603], ActorId: [3:7554062246329061237:2604], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, TxId: 2025-09-25T16:19:35.944884Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2284: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061236:2603], ActorId: [3:7554062246329061237:2604], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-09-25T16:19:35.944915Z node 3 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7554062246329061235:2602], ActorId: [3:7554062246329061236:2603], TraceId: ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14, RequestDatabase: /dc-1, Got response [3:7554062246329061237:2604] SUCCESS 2025-09-25T16:19:35.944941Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2391: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7554062246329061234:2601] ActorId: [3:7554062246329061235:2602] Database: /dc-1 ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14. Extracted script execution operation [3:7554062246329061237:2604], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, RetryRequired: 0, RunScriptActorId: [3:7554062246329060839:2449], LeaseGeneration: 0 2025-09-25T16:19:35.944950Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2410: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7554062246329061234:2601] ActorId: [3:7554062246329061235:2602] Database: /dc-1 ExecutionId: 3dc9e421-fda51b79-91f89b04-43554c14. 
Reply success 2025-09-25T16:19:35.944979Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=NzI1YjZlOWYtODJmZTAxYTMtNjIxOGU0Zi1mNjUzNzYxMA==, workerId: [3:7554062246329061239:2429], local sessions count: 0 2025-09-25T16:19:35.948269Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k60trjccbceapyc37fvdgekg", Request has 18444985256533.603357s seconds to be completed 2025-09-25T16:19:35.948764Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60trjccbceapyc37fvdgekg", Created new session, sessionId: ydb://session/3?node_id=3&id=OWFhYWJhYjQtM2Q4YzVhMzktM2VmZWY1ZmQtZjJiYzYwNw==, workerId: [3:7554062246329061268:2442], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:35.948800Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k60trjccbceapyc37fvdgekg --------------------------- INIT FINISHED --------------------------- 2025-09-25T16:19:35.949496Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:148: Table test_table updater. Describe result: PathErrorUnknown 2025-09-25T16:19:35.949503Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:168: Table test_table updater. Creating table 2025-09-25T16:19:35.949510Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:101: Table test_table updater. Full table path:/dc-1/test/test_table 2025-09-25T16:19:35.950125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:35.950429Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-09-25T16:19:35.950442Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:262: Table test_table updater. Subscribe on create table tx: 281474976710675 2025-09-25T16:19:35.957655Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:291: Table test_table updater. Request: create. Transaction completed: 281474976710675. Doublechecking... 2025-09-25T16:19:36.010477Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:36.026926Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. Column diff is empty, finishing 2025-09-25T16:19:36.027094Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:363: Table test_table updater. 
Column diff is empty, finishing 2025-09-25T16:19:36.029126Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=OWFhYWJhYjQtM2Q4YzVhMzktM2VmZWY1ZmQtZjJiYzYwNw==, workerId: [3:7554062246329061268:2442], local sessions count: 0 >> TAsyncIndexTests::Decimal >> TAsyncIndexTests::CreateTable [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::RestartQueryWithGetOperation |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableMultipleColumns >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> TFulltextIndexTests::CreateTableUnsupportedSettings >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-09-25T16:19:32.006811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062236144764525:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.006908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:32.011490Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062235081231178:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.011514Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0050bc/r3tmp/tmp6l3YLo/pdisk_1.dat 2025-09-25T16:19:32.054761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.066879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.073377Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:18199 2025-09-25T16:19:32.108055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.108087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.109543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.117854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.117882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.120104Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 
2025-09-25T16:19:32.120414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.241564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.241581Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.361587Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.361603Z node 2 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.386709Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.395184Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=2&id=YjI2NDc0ZTMtNTA3NDUzN2EtOGJiZTVkN2UtNmI4MjJkODA=, workerId: [2:7554062235081231462:2292], database: , longSession: 1, local sessions count: 1 2025-09-25T16:19:32.395247Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.395273Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.395280Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.395288Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.396395Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.399703Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=2&id=YjI2NDc0ZTMtNTA3NDUzN2EtOGJiZTVkN2UtNmI4MjJkODA=, PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-09-25T16:19:32.399727Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7554062236144765140:2457] 2025-09-25T16:19:32.399746Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.399753Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.399760Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.400616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236144765150:2298], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.400646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.400742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062236144765166:2299], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.400755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.401005Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=2&id=YjI2NDc0ZTMtNTA3NDUzN2EtOGJiZTVkN2UtNmI4MjJkODA=, PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7554062235081231462:2292] 2025-09-25T16:19:32.401018Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7554062235081231479:2122] 2025-09-25T16:19:32.414610Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062235081231480:2294], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.414655Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.414827Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062235081231490:2295], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.414839Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.444925Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60trexhesj6thwxkhgm6x6d", Created new session, sessionId: ydb://session/3?node_id=2&id=ODY1NmUwNmYtNGExYWU3NjUtM2MyMDNiNmEtZmFmMDBjOTQ=, workerId: [2:7554062235081231493:2297], database: , longSession: 0, local sessions count: 2 2025-09-25T16:19:32.445005Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60trexhesj6thwxkhgm6x6d, Database: , SessionId: ydb://session/3?node_id=2&id=ODY1NmUwNmYtNGExYWU3NjUtM2MyMDNiNmEtZmFmMDBjOTQ=, PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7554062235081231493:2297] 2025-09-25T16:19:32.445012Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7554062235081231494:2126] 2025-09-25T16:19:32.448508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062235081231495:2298], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.448549Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.448681Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062235081231500:2301], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.448721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062235081231501:2302], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.448739Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.451056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:32.459014Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554062235081231504:2303], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-09-25T16:19:32.546366Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554062235081231532:2137] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path e ... -60b9661c-4a24421, RequestDatabase: /Root, Save result #0, FirstRow: 0, AccumulatedSize: 0, rows to save: 1, size to save: 7 2025-09-25T16:19:35.961332Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TSaveScriptExecutionResultQuery] OwnerId: [5:7554062248862264610:3077], ActorId: [5:7554062248862264612:3078], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=Zjc4ZTU5ODgtMTc3MDJiODEtNTZkODQzMTMtNmExYzdmNzY=, TxId: , text: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database AS database, $execution_id AS execution_id, $result_set_id AS result_set_id, $expire_at AS expire_at, T.row_id AS row_id, T.result_set AS result_set, T.accumulated_size AS accumulated_size FROM AS_TABLE($items) AS T; 2025-09-25T16:19:35.961464Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=Zjc4ZTU5ODgtMTc3MDJiODEtNTZkODQzMTMtNmExYzdmNzY=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 15, targetId: [5:7554062248862264614:2395] 2025-09-25T16:19:35.961482Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 15 timeout: 300.000000s actor id: [5:7554062248862264616:3079] 2025-09-25T16:19:35.990123Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 15, sender: [5:7554062248862264615:2396], selfId: [5:7554062248862263209:2266], source: [5:7554062248862264614:2395] 2025-09-25T16:19:35.990227Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptExecutionResultQuery] OwnerId: [5:7554062248862264610:3077], ActorId: [5:7554062248862264612:3078], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=Zjc4ZTU5ODgtMTc3MDJiODEtNTZkODQzMTMtNmExYzdmNzY=, TxId: 2025-09-25T16:19:35.990240Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TSaveScriptExecutionResultQuery] OwnerId: [5:7554062248862264610:3077], ActorId: [5:7554062248862264612:3078], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=Zjc4ZTU5ODgtMTc3MDJiODEtNTZkODQzMTMtNmExYzdmNzY=, TxId: 2025-09-25T16:19:35.990270Z node 5 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptExecutionResultQuery] OwnerId: [5:7554062248862264609:3076], ActorId: [5:7554062248862264610:3077], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, Got response [5:7554062248862264612:3078] SUCCESS 2025-09-25T16:19:35.990288Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3197: [ScriptExecutions] [TSaveScriptExecutionResultActor] OwnerId: [5:7554062248862264383:2956] ActorId: [5:7554062248862264609:3076] Database: /Root ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421 
ResultSetId: 0. Result part successfully saved, AccumulatedSize: 7, saver actor: [5:7554062248862264612:3078] 2025-09-25T16:19:35.990297Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3208: [ScriptExecutions] [TSaveScriptExecutionResultActor] OwnerId: [5:7554062248862264383:2956] ActorId: [5:7554062248862264609:3076] Database: /Root ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421 ResultSetId: 0. Reply SUCCESS, issues: 2025-09-25T16:19:35.990347Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=5&id=Zjc4ZTU5ODgtMTc3MDJiODEtNTZkODQzMTMtNmExYzdmNzY=, workerId: [5:7554062248862264614:2395], local sessions count: 1 2025-09-25T16:19:35.990381Z node 5 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264383:2956], ActorId: [5:7554062248862264638:3089], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7554062248862264639:3090] 2025-09-25T16:19:35.990385Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264638:3089], ActorId: [5:7554062248862264639:3090], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. Database: /Root, IsSystemUser: 0, run create session 2025-09-25T16:19:35.990429Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256533.561190s seconds to be completed 2025-09-25T16:19:35.990951Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, workerId: [5:7554062248862264641:2402], database: /Root, longSession: 1, local sessions count: 2 2025-09-25T16:19:35.990990Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:35.991072Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264638:3089], ActorId: [5:7554062248862264639:3090], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, TxId: , text: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2025-09-25T16:19:35.991178Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 17, targetId: [5:7554062248862264641:2402] 2025-09-25T16:19:35.991199Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 17 timeout: 300.000000s actor id: [5:7554062248862264643:3091] 2025-09-25T16:19:35.994695Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 17, sender: [5:7554062248862264642:2403], selfId: [5:7554062248862263209:2266], source: [5:7554062248862264641:2402] 2025-09-25T16:19:35.994755Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264638:3089], ActorId: [5:7554062248862264639:3090], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, TxId: 2025-09-25T16:19:35.994768Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264638:3089], ActorId: [5:7554062248862264639:3090], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, TxId: 2025-09-25T16:19:35.994799Z node 5 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7554062248862264383:2956], ActorId: [5:7554062248862264638:3089], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Got response [5:7554062248862264639:3090] SUCCESS 2025-09-25T16:19:35.994863Z node 5 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062248862264661:2407], ActorId: [5:7554062248862264662:2408], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7554062248862264663:2409] 2025-09-25T16:19:35.994877Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062248862264662:2408], ActorId: [5:7554062248862264663:2409], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. 
Database: /Root, IsSystemUser: 0, run create session 2025-09-25T16:19:35.994906Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=5&id=MzNiZmVhMTgtYzJjZTE5ZDUtZDZjYzk5NjctM2ZlNGE4NmM=, workerId: [5:7554062248862264641:2402], local sessions count: 1 2025-09-25T16:19:35.994918Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256533.556700s seconds to be completed 2025-09-25T16:19:35.995429Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=5&id=YTQwODgxMGYtMjg5NjE1OGYtNzMxNGZkN2EtYTUxZTdhOWU=, workerId: [5:7554062248862264665:2411], database: /Root, longSession: 1, local sessions count: 2 2025-09-25T16:19:35.995466Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:35.995513Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062248862264662:2408], ActorId: [5:7554062248862264663:2409], TraceId: ExecutionId: 8fa17be4-e200e0de-60b9661c-4a24421, RequestDatabase: /Root, LeaseGeneration: 1, State: Get operation info, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=YTQwODgxMGYtMjg5NjE1OGYtNzMxNGZkN2EtYTUxZTdhOWU=, TxId: , text: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names, retry_state, graph_compressed FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:35.995595Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=YTQwODgxMGYtMjg5NjE1OGYtNzMxNGZkN2EtYTUxZTdhOWU=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 19, targetId: [5:7554062248862264665:2411] 2025-09-25T16:19:35.995604Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [5:7554062248862264667:3096] >> TAsyncIndexTests::Decimal [GOOD] >> TAsyncIndexTests::OnlineBuild >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-09-25T16:19:32.518229Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062233540319656:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.518252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005051/r3tmp/tmpg44NeA/pdisk_1.dat 2025-09-25T16:19:32.553901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.567951Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:17979 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:19:32.623276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.623299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.624280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.628878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:19:32.632806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.730454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.730467Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.887619Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.959162Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=ZDgxY2E2YjgtN2JiYzM1ZjgtYzMyNzI5ZmItZDU5ODViNmI=, workerId: [1:7554062233540320224:2294], database: , longSession: 0, local sessions count: 1 2025-09-25T16:19:32.959272Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=ZDgxY2E2YjgtN2JiYzM1ZjgtYzMyNzI5ZmItZDU5ODViNmI=, PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7554062233540320224:2294] 2025-09-25T16:19:32.959283Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-09-25T16:19:32.959288Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.959293Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.959298Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.959426Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2439: SessionId: ydb://session/3?node_id=1&id=ZDgxY2E2YjgtN2JiYzM1ZjgtYzMyNzI5ZmItZDU5ODViNmI=, ActorId: [1:7554062233540320224:2294], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-09-25T16:19:32.959496Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 2, sender: [1:7554062233540320178:2287], selfId: [1:7554062233540319867:2263], source: [1:7554062233540320224:2294] 2025-09-25T16:19:32.959912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233540320228:2296], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.959933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.959978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062233540320250:2297], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.959989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.969189Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1172: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-09-25T16:19:32.969203Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1175: Invalid request info while on request timeout handle. RequestId: 2 2025-09-25T16:19:33.288807Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062238048390084:2146];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:33.288915Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005051/r3tmp/tmpyUS6nS/pdisk_1.dat 2025-09-25T16:19:33.299097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:33.314520Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4444, node 2 2025-09-25T16:19:33.328788Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:33.328799Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:33.328801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:33.328870Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:33.344895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:19:33.390369Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:33.390404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:33.392073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:33.557892Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:33.557908Z node 2 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-09-25T16:19:33.682636Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:33.687944Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:814: Received ping session request, request_id: 2, sender: [2:7554062238048390876:2302], trace_id: 01k60trfvw8scz6arcgh035df6 2025-09-25T16:19:33.688014Z node 2 :KQP_PROXY NOTICE: kqp_proxy_service.cpp:1418: TraceId: "01k60trfvw8scz6arcgh035df6", Session not found: ydb://session/3?node_id=2&id=YDB0NDRhNjItYWQwZmIzMTktMWUyOTE4ZWYtYzE0NzJjNg== 2025-09-25T16:19:33.688031Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:33.688036Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:33.688045Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:33.688097Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60trfvw8scz6arcgh035df6", Forwarded response to sender actor ... WHERE database = $database AND execution_id = $execution_id; 2025-09-25T16:19:36.332037Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=5&id=NmRmYzIwZmItYmFiMDUwOTAtMzNkYzkzNjktNTBmNDlmOGM=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 31, targetId: [5:7554062253283190664:2475] 2025-09-25T16:19:36.332049Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 31 timeout: 300.000000s actor id: [5:7554062253283190687:2654] 2025-09-25T16:19:36.334370Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 31, sender: [5:7554062253283190686:2482], selfId: [5:7554062244693254857:2197], source: [5:7554062253283190664:2475] 2025-09-25T16:19:36.334438Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062253283190661:2472], ActorId: [5:7554062253283190662:2473], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=5&id=NmRmYzIwZmItYmFiMDUwOTAtMzNkYzkzNjktNTBmNDlmOGM=, TxId: 2025-09-25T16:19:36.334466Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062253283190661:2472], ActorId: [5:7554062253283190662:2473], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=NmRmYzIwZmItYmFiMDUwOTAtMzNkYzkzNjktNTBmNDlmOGM=, TxId: 2025-09-25T16:19:36.334474Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4060: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062253283190661:2472], ActorId: [5:7554062253283190662:2473], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-09-25T16:19:36.334499Z node 5 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [5:7554062253283190660:2471], ActorId: [5:7554062253283190661:2472], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [5:7554062253283190662:2473] SUCCESS 2025-09-25T16:19:36.334527Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1416: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [5:7554062253283190620:2634] ActorId: [5:7554062253283190621:2635] Database: /dc-1 ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24. Successfully finalized script execution operation, WaitingRetry: 0 2025-09-25T16:19:36.334531Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=5&id=NmRmYzIwZmItYmFiMDUwOTAtMzNkYzkzNjktNTBmNDlmOGM=, workerId: [5:7554062253283190664:2475], local sessions count: 1 2025-09-25T16:19:36.334537Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1753: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [5:7554062253283190620:2634] ActorId: [5:7554062253283190621:2635] Database: /dc-1 ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24. Reply success 2025-09-25T16:19:36.335304Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60trjrf04rfyn7nyhah1z3m, Database: /dc-1, SessionId: ydb://session/3?node_id=5&id=ZjkwNDQ2YzYtMWIyYzA2OGUtZjY2Y2EwN2YtYWNhNWFjNjM=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 32, targetId: [5:7554062248988223244:2433] 2025-09-25T16:19:36.335312Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 32 timeout: 300.000000s actor id: [5:7554062253283190712:2661] 2025-09-25T16:19:36.407683Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60trjrf04rfyn7nyhah1z3m", Forwarded response to sender actor, requestId: 32, sender: [5:7554062253283190711:2487], selfId: [5:7554062244693254857:2197], source: [5:7554062248988223244:2433] 2025-09-25T16:19:36.408308Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:795: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [5:7554062253283190747:2673] ActorId: [5:7554062253283190748:2674] Database: /dc-1 ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24. Bootstrap. Start TLeaseUpdateRetryActor [5:7554062253283190749:2675] 2025-09-25T16:19:36.408330Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:36.408334Z node 5 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190748:2674], ActorId: [5:7554062253283190749:2675], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [5:7554062253283190750:2676] 2025-09-25T16:19:36.408395Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256533.143227s seconds to be completed 2025-09-25T16:19:36.408807Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, workerId: [5:7554062253283190752:2499], database: /dc-1, longSession: 1, local sessions count: 2 2025-09-25T16:19:36.408850Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:36.408908Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:657: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2025-09-25T16:19:36.408923Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:36.409000Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 34, targetId: [5:7554062253283190752:2499] 2025-09-25T16:19:36.409010Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 34 timeout: 300.000000s actor id: [5:7554062253283190754:2677] 2025-09-25T16:19:36.449514Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 34, sender: [5:7554062253283190753:2500], selfId: [5:7554062244693254857:2197], source: [5:7554062253283190752:2499] 2025-09-25T16:19:36.449584Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, TxId: 01k60trjw0bxsppr35gddfk0dp 2025-09-25T16:19:36.449609Z node 5 :KQP_PROXY WARN: query_actor.cpp:375: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, TxId: 01k60trjw0bxsppr35gddfk0dp 2025-09-25T16:19:36.449620Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:431: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01k60trjw0bxsppr35gddfk0dp in session: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng== 2025-09-25T16:19:36.449645Z node 5 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190748:2674], ActorId: [5:7554062253283190749:2675], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [5:7554062253283190750:2676] NOT_FOUND 2025-09-25T16:19:36.449662Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:805: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [5:7554062253283190747:2673] ActorId: [5:7554062253283190748:2674] Database: /dc-1 ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24. Lease update [5:7554062253283190750:2676] finished NOT_FOUND, issues: {
: Error: No such execution } 2025-09-25T16:19:36.449695Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 35, targetId: [5:7554062253283190752:2499] 2025-09-25T16:19:36.449706Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 35 timeout: 600.000000s actor id: [5:7554062253283190776:2685] 2025-09-25T16:19:36.449826Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 35, sender: [5:7554062253283190775:2507], selfId: [5:7554062244693254857:2197], source: [5:7554062253283190752:2499] 2025-09-25T16:19:36.449845Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:440: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [5:7554062253283190749:2675], ActorId: [5:7554062253283190750:2676], TraceId: ExecutionId: 6236098a-c5303b9e-e0fc186d-b2b52d24, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-09-25T16:19:36.449891Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=5&id=MTg1YTBkOS03NTA2OTg0YS0xNDJhNDdjYS1kMWUyMWU3Ng==, workerId: [5:7554062253283190752:2499], local sessions count: 1 2025-09-25T16:19:36.450484Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=5&id=ZjkwNDQ2YzYtMWIyYzA2OGUtZjY2Y2EwN2YtYWNhNWFjNjM=, workerId: [5:7554062248988223244:2433], local sessions count: 0 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableUnsupportedSettings [GOOD] >> TFulltextIndexTests::CreateTableMultipleColumns [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:36.767928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:36.767951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.767957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:36.767962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:36.767969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:36.767974Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:36.767983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.767996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:36.768118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:36.768175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:36.785193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:36.785216Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:36.789510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:36.789597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:36.789630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:36.791184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:36.791255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:36.791352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.791413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:36.791835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.791876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:36.792140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.792151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.792174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:36.792183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:36.792190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:36.792223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-09-25T16:19:36.793618Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:36.813941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:36.814024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.814083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:36.814089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:36.814125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:36.814135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:36.814936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.814979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:36.815020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.815028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:36.815032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:36.815036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:36.815401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.815409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:36.815413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:36.815682Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.815689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.815694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.815699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:36.816139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:36.816491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:36.816550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:36.816733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.816752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:36.816758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.816803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:36.816808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.816854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:36.816864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:36.817256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.817262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.903351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.903356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:36.903361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 129 -> 240 2025-09-25T16:19:36.903939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.905407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.905434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.905483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.905550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2025-09-25T16:19:36.905676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:36.905682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:36.905688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:36.905692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:36.905697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-09-25T16:19:36.905736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.905741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:19:36.905749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:36.905753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:36.905758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:36.905762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:36.905767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-09-25T16:19:36.905781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:383:2350] message: TxId: 101 2025-09-25T16:19:36.905788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:36.905795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:19:36.905801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:19:36.905823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:36.905830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:1 2025-09-25T16:19:36.905834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:1 2025-09-25T16:19:36.905840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:36.905845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:2 2025-09-25T16:19:36.905849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:2 2025-09-25T16:19:36.905857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:36.906420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:36.906435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:384:2351] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:36.906558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:36.906616Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 68us result status StatusSuccess 
2025-09-25T16:19:36.906855Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 
110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:36.917390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:36.917412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.917417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:36.917421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:36.917426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:36.917429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:36.917436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.917447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:36.917551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:36.917603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2025-09-25T16:19:36.928367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:36.928390Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:36.932453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:36.932560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:36.932599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:36.935081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:36.935159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:36.935306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.935384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:36.935842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.935888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:36.936146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.936156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.936177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:36.936185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:36.936192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:36.936223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.937620Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:36.959831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:36.959927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:36.959989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:36.959997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:36.960041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:36.960061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:36.960986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.961040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:36.961094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.961106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:36.961112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:36.961118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:36.961664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.961679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:36.961686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:36.962159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.962173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.962179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.962187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:36.962880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:36.963380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:36.963435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:36.963649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.963680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:36.963688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.963752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:36.963761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.963791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:36.963804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:36.964301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.964310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.059857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.059863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:37.059868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 129 -> 240 2025-09-25T16:19:37.060528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:37.062170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:37.062204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:37.062248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:37.062317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2025-09-25T16:19:37.062458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:37.062464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:37.062472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:37.062476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:37.062482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-09-25T16:19:37.062522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.062528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:19:37.062536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:37.062540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:37.062544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:37.062548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:37.062552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-09-25T16:19:37.062569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:383:2350] message: TxId: 101 2025-09-25T16:19:37.062575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:37.062582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:19:37.062587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:19:37.062611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:37.062617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:1 2025-09-25T16:19:37.062621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:1 2025-09-25T16:19:37.062627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:37.062631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:2 2025-09-25T16:19:37.062635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:2 2025-09-25T16:19:37.062643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:37.063255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:37.063271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:384:2351] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:37.063411Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:37.063482Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 80us result status StatusSuccess 
2025-09-25T16:19:37.063738Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 
110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> TVectorIndexTests::CreateTablePrefix >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> TAsyncIndexTests::OnlineBuild [GOOD] >> TVectorIndexTests::CreateTablePrefixCovering |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:34.484759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:34.484782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.484787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:34.484792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:34.484798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:34.484803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:34.484811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.484838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:34.484945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:34.485011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:34.495299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:34.495321Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.500562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:34.500688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:34.500735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:34.502823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:34.502895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:34.503027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.503121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:34.503687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.503746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:34.504059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.504072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.504097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:34.504107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:34.504113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:34.504163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.505842Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.530693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.530785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.530861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:34.530871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:34.530930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.530948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.531959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.532030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:34.532091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.532104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:34.532110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.532117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:34.532748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.532764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.532771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:34.533379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.533395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.533403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-09-25T16:19:34.533410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:34.534194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.534703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:34.534770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:34.535021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.535055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.535064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.535144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:34.535155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.535209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:34.535226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:34.535824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.535835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
entPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:36.948324Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-09-25T16:19:36.948747Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.948758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:36.948810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:36.948857Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.948864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-09-25T16:19:36.948871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-09-25T16:19:36.948938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.948945Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-09-25T16:19:36.948956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-09-25T16:19:36.949157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:19:36.949172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:19:36.949178Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:19:36.949185Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:19:36.949192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:36.949367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:19:36.949385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:19:36.949390Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:19:36.949394Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:36.949400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:36.949412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-09-25T16:19:36.950219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-09-25T16:19:36.950649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.950709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:36.971940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6561: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-09-25T16:19:36.971975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:36.972001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:19:36.972585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.972633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.972643Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:19:36.972662Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:19:36.972667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:19:36.972673Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:19:36.972677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:19:36.972683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-09-25T16:19:36.972699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:347:2324] message: TxId: 101 2025-09-25T16:19:36.972710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:19:36.972717Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:19:36.972722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:19:36.972756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:36.973264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:36.973279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:348:2325] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:36.973400Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:36.973465Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 75us result status StatusSuccess 2025-09-25T16:19:36.973664Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> RemoteTopicReader::ReadTopic [GOOD] >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiMultipleOperationsListing [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableUnsupportedSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:37.162992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.163012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.163016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.163020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.163025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.163028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.163035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.163046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-09-25T16:19:37.163136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.163189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.174735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:37.174756Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.178113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.178196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.178228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.179541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.179587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.179677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.179745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.180117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.180156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.180363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.180371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.180388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.180394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.180399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.180424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.181714Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.197771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.197840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.197888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.197894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.197936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.197947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.198497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.198538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.198575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.198583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.198586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.198591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.198876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.198884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.198890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.199141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.199148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.199153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.199158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.199628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.199944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.199994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.200146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.200165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.200170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.200215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:37.200220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.200244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.200253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:37.200746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.200757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.200798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.200805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:37.200901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.200909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:37.200921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:37.200925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.200929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:37.200932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.200935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:37.200940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.200944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:37.200946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:37.200957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:37.200963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:37.200966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:37.201236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:37.201254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:37.201260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:37.201264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:37.201268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.201280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:37.202001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 1 2025-09-25T16:19:37.202092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:37.202472Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:37.202669Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:37.203309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "text" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT columns { column: "text" analyzers { tokenizer: STANDARD use_filter_edge_ngram: true } } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.203397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:37.203423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Unsupported use_filter_edge_ngram setting, at schemeshard: 72057594046678944 2025-09-25T16:19:37.203429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Unsupported use_filter_edge_ngram setting, at schemeshard: 72057594046678944 2025-09-25T16:19:37.203630Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:37.204270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Unsupported use_filter_edge_ngram setting" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Unsupported use_filter_edge_ngram setting, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:37.204353Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:37.204394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:37.204399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:37.204439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 
72057594046678944 2025-09-25T16:19:37.204453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:37.204457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:37.204517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204547Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 35us result status StatusPathDoesNotExist 2025-09-25T16:19:37.204574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableMultipleColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:37.170003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.170019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.170023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.170027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.170031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.170034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-09-25T16:19:37.170040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.170049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.170130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.170172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.181593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:37.181609Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.184871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.184940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.184964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.186275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.186319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.186419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.186495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.186881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.186922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.187091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.187097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.187111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.187116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.187120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.187144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.188278Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.201798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.201862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.201905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.201910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.201938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.201947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.202521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.202555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.202588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.202594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.202597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.202601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.202896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.202904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.202909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.203139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-09-25T16:19:37.203145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.203148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.203153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.203590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.203913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.203940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.204069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.204128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:37.204133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.204154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.204162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:37.204518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-09-25T16:19:37.204564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:37.204656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.204663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:37.204675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:37.204680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.204685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:37.204689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.204693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:37.204699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.204704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:37.204709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:37.204719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:37.204725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:37.204728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:37.204988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:37.205001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:37.205005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:37.205008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:37.205012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.205021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:37.205556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:37.205622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:37.205912Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:37.206063Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:37.206535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text1" Type: "String" } Columns { Name: "text2" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "text1" KeyColumnNames: "text2" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT columns { column: "text1" analyzers { tokenizer: STANDARD use_filter_lowercase: true } } columns { column: "text2" analyzers { tokenizer: STANDARD use_filter_lowercase: true } } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.206589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:37.206609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: columns should have a single value, at schemeshard: 72057594046678944 2025-09-25T16:19:37.206615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: columns should have a single value, at schemeshard: 72057594046678944 2025-09-25T16:19:37.206746Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:37.207318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "columns should have a single value" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.207352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: columns should have a single value, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:37.207396Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult 
ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:37.207426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:37.207430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:37.207466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:37.207478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:37.207482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:37.207534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:37.207565Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 36us result status StatusPathDoesNotExist 2025-09-25T16:19:37.207589Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTablePrefix [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:37.250321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.250346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.250353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.250358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.250366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.250371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.250381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.250395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.250518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.250603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.267974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:37.267997Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.272393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.272489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.272527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.274488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.274548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.274651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.274718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.275182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.275249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.275504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.275516Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.275539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.275548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.275557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.275593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.277187Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.301555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.301645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.301703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.301712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.301757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.301773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.302430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.302535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.302553Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.302559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.302985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.303415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.303444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.304178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.304626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.304669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.304903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.304931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.304940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.305005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:37.305013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.305048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.305061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:37.305557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.305567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 2025-09-25T16:19:37.542540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.542550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-09-25T16:19:37.542559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:37.542586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.543265Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-09-25T16:19:37.543298Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:393:2363], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:37.543338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-09-25T16:19:37.543372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 
281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-09-25T16:19:37.543417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-09-25T16:19:37.543424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-09-25T16:19:37.543431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-09-25T16:19:37.543498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.543519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.543529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-09-25T16:19:37.543537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710760:0 128 -> 240 2025-09-25T16:19:37.543969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.543986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-09-25T16:19:37.544002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-09-25T16:19:37.544008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:37.544014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-09-25T16:19:37.544018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:37.544024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-09-25T16:19:37.544036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:130:2155] message: TxId: 281474976710760 2025-09-25T16:19:37.544044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:37.544050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710760:0 2025-09-25T16:19:37.544055Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710760:0 2025-09-25T16:19:37.544070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-09-25T16:19:37.544491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-09-25T16:19:37.544511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710760 2025-09-25T16:19:37.544523Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2417: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-09-25T16:19:37.544544Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2420: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:393:2363], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-09-25T16:19:37.544919Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-09-25T16:19:37.544948Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:393:2363], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:37.544958Z 
node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-09-25T16:19:37.545307Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-09-25T16:19:37.545333Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:393:2363], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:37.545341Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-09-25T16:19:37.545364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:37.545373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:484:2443] TestWaitNotification: OK eventTxId 102 >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-09-25T16:19:35.721072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062247796512718:2151];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:35.721089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000793/r3tmp/tmpSOnPHJ/pdisk_1.dat 2025-09-25T16:19:35.759548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:35.764311Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:19550 TServer::EnableGrpc on GrpcPort 3673, node 1 2025-09-25T16:19:35.791631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:35.791645Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:35.791656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:35.791710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:35.827515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:35.827543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:35.828483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:35.828560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:35.991455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:36.722719Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:36.725170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:36.740687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062252091480802:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:36.740694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062252091480813:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:36.740705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:36.740763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062252091480817:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:36.740777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:36.741261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:19:36.746054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062252091480816:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:19:36.831939Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062252091480858:2464] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:19:36.932323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:37.006278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:37.070950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:37.151176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:172) 2025-09-25T16:19:37.224745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:19:37.431732Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handshake: worker# [1:7554062247796513194:2291] 2025-09-25T16:19:37.432330Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Create read session: session# [1:7554062256386448713:2290] 2025-09-25T16:19:37.432407Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-09-25T16:19:37.434277Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_13474866145591895085_v1 } } 2025-09-25T16:19:37.435103Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-09-25T16:19:37.331000Z WriteTime: 2025-09-25T16:19:37.331000Z MessageGroupId: producer ProducerId: producer }] } } 2025-09-25T16:19:37.435193Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-09-25T16:19:37.450459Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7554062256386448712:2706] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-09-25T16:19:37.448000Z WriteTime: 2025-09-25T16:19:37.448000Z MessageGroupId: producer ProducerId: producer }] } } 2025-09-25T16:19:37.549145Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7554062256386448806:2738] Handshake: worker# [1:7554062247796513194:2291] 2025-09-25T16:19:37.549568Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7554062256386448806:2738] Create read session: session# [1:7554062256386448807:2290] 2025-09-25T16:19:37.549628Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7554062256386448806:2738] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-09-25T16:19:37.551358Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7554062256386448806:2738] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_16717343570385191785_v1 } } 2025-09-25T16:19:37.552121Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7554062256386448806:2738] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-09-25T16:19:37.448000Z WriteTime: 2025-09-25T16:19:37.448000Z MessageGroupId: producer ProducerId: producer }] } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:34.265120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:34.265146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.265153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-09-25T16:19:34.265159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:34.265167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:34.265172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:34.265183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.265197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:34.265324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:34.265398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:34.282289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:34.282316Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.286834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:34.286898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:34.286941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:34.288614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:34.288662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:34.288762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.288857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:34.289259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.289306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:34.289587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.289598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.289618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:34.289626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:34.289632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:34.289672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.291155Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.312456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.312531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.312596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:34.312604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:34.312652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:34.312666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.313502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.313555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:34.313603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.313613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:34.313618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.313624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:34.314056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.314069Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.314077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:34.314402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.314412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.314418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.314424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:34.315053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.315442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:34.315479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:34.315685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.315709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:34.315716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.315774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:34.315782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.315812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:34.315824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:34.316284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.316294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... shard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-09-25T16:19:37.483772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-09-25T16:19:37.484063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-09-25T16:19:37.484076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-09-25T16:19:37.484155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-09-25T16:19:37.484162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-09-25T16:19:37.484323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2025-09-25T16:19:37.484332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-09-25T16:19:37.484438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:21 2025-09-25T16:19:37.484443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-09-25T16:19:37.484458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2025-09-25T16:19:37.484461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-09-25T16:19:37.484864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:17 2025-09-25T16:19:37.484882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-09-25T16:19:37.484920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2025-09-25T16:19:37.484926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-09-25T16:19:37.484944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:13 2025-09-25T16:19:37.484948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-09-25T16:19:37.486658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2025-09-25T16:19:37.486678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-09-25T16:19:37.486733Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:9 2025-09-25T16:19:37.486739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-09-25T16:19:37.486760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2025-09-25T16:19:37.486766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-09-25T16:19:37.486782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:38 2025-09-25T16:19:37.486787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-09-25T16:19:37.486803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:40 2025-09-25T16:19:37.486808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-09-25T16:19:37.486824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2025-09-25T16:19:37.486829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-09-25T16:19:37.486841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:34 2025-09-25T16:19:37.486846Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-09-25T16:19:37.488093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2025-09-25T16:19:37.488110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-09-25T16:19:37.488134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:30 2025-09-25T16:19:37.488139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-09-25T16:19:37.488158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:26 2025-09-25T16:19:37.488163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-09-25T16:19:37.488176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2025-09-25T16:19:37.488181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-09-25T16:19:37.488198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:57 2025-09-25T16:19:37.488203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 
72075186233409602 2025-09-25T16:19:37.488217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:53 2025-09-25T16:19:37.488221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-09-25T16:19:37.488235Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:55 2025-09-25T16:19:37.488240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-09-25T16:19:37.488255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:49 2025-09-25T16:19:37.488260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-09-25T16:19:37.489168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:47 2025-09-25T16:19:37.489184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-09-25T16:19:37.489201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:51 2025-09-25T16:19:37.489204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-09-25T16:19:37.489213Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:45 2025-09-25T16:19:37.489216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-09-25T16:19:37.489240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:43 2025-09-25T16:19:37.489244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-09-25T16:19:37.489267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:41 2025-09-25T16:19:37.489274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-09-25T16:19:37.489286Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2025-09-25T16:19:37.489662Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:37.489710Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 71us result status StatusPathDoesNotExist 2025-09-25T16:19:37.489753Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-09-25T16:19:37.489856Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 6 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-09-25T16:19:37.489872Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 6 took 18us result status StatusPathDoesNotExist 2025-09-25T16:19:37.489881Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "" PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::BackupCollectionRestoreOpApiMultipleOperationsListing [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:32.810035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:32.810060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.810067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:32.810073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:32.810079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:32.810084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-09-25T16:19:32.810094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:32.810109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:32.810230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:32.810285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:32.828083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:32.828105Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.832494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:32.832586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:32.832616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:32.834186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:32.834249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:32.834355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.834414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:32.834841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.834877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:32.835137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.835147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:32.835185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:32.835194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:32.835201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:32.835233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.836595Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:32.861765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:32.861856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.861922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:32.861931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:32.861992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:32.862009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.867309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.867386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:32.867444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.867468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:32.867475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:32.867481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:32.870532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.870557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:32.870565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:32.871945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.871965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:32.871972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.871979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:32.873289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:32.873975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:32.874038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:32.874270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:32.874304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:32.874313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.874394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:32.874404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:32.874440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:32.874453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:32.875032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:32.875044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
6:19:36.905571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:66: [IncrementalRestore] Persisted CompletedOperations update: @ 2025-09-25T16:19:36.905575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=1, CurrentIncrementalIdx=2, IncrementalBackups.size()=3 2025-09-25T16:19:36.905578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:76: [IncrementalRestore] All operations for current incremental backup completed, moving to next 2025-09-25T16:19:36.905583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:86: [IncrementalRestore] After MoveToNextIncremental: CurrentIncrementalIdx=3, IncrementalBackups.size()=3 2025-09-25T16:19:36.905586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:89: [IncrementalRestore] All incremental backups processed, performing finalization 2025-09-25T16:19:36.905589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:210: [IncrementalRestore] Starting finalization of incremental restore operation: 136 2025-09-25T16:19:36.905605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:234: [IncrementalRestore] Sending finalization operation with txId: 281474976710673 2025-09-25T16:19:36.906162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpIncrementalRestoreFinalize Internal: true IncrementalRestoreFinalize { OriginalOperationId: 136 BackupCollectionPathId: 11 } } TxId: 281474976710673 , at schemeshard: 72057594046678944 2025-09-25T16:19:36.906184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:209: [72057594046678944] TIncrementalRestoreFinalizeOp Propose, opId: 281474976710673:0 2025-09-25T16:19:36.906190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-09-25T16:19:36.906196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710673:0 type: TxIncrementalRestoreFinalize target path: [OwnerId: 72057594046678944, LocalPathId: 11] source path: 2025-09-25T16:19:36.906204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:36.906787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 136 2025-09-25T16:19:36.906813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710673, response: Status: StatusAccepted TxId: 281474976710673 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:36.906836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710673, subject: , status: StatusAccepted, operation: RESTORE INCREMENTAL FINALIZE, no path 2025-09-25T16:19:36.906860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710673, status# StatusAccepted 2025-09-25T16:19:36.906865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: 
Status: StatusAccepted TxId: 281474976710673 SchemeshardId: 72057594046678944 2025-09-25T16:19:36.906869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7141: no able to determine destination for message TEvModifySchemeTransactionResult: txId: 281474976710673, at schemeshard: 72057594046678944 2025-09-25T16:19:36.906880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710673:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.906885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:56: [72057594046678944] TIncrementalRestoreFinalize TPropose operationId: 281474976710673:0 ProgressState 2025-09-25T16:19:36.906889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:144: [72057594046678944] Marked incremental restore state as completed for operation: 136 2025-09-25T16:19:36.906892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:147: [72057594046678944] Keeping IncrementalRestoreOperations entry for operation: 136 - will be cleaned up on FORGET 2025-09-25T16:19:36.906896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:150: [72057594046678944] Cleaned up long incremental restore ops for operation: 136 2025-09-25T16:19:36.906901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:176: [72057594046678944] Cleaned up mappings for operation: 136 2025-09-25T16:19:36.906904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:82: [72057594046678944] Cleaning up 1 shard progress entries for operation 136 2025-09-25T16:19:36.906919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710673:0 progress is 1/1 2025-09-25T16:19:36.906922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 1/1 2025-09-25T16:19:36.906927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710673:0 progress is 1/1 2025-09-25T16:19:36.906929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 1/1 2025-09-25T16:19:36.906932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710673, ready parts: 1/1, is published: true 2025-09-25T16:19:36.906935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 1/1 2025-09-25T16:19:36.906938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710673:0 2025-09-25T16:19:36.906940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710673:0 2025-09-25T16:19:36.906949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2025-09-25T16:19:36.907234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710673:0 2025-09-25T16:19:37.531731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] 
Handle(TEvProgressIncrementalRestore) operationId: 135 tablet: 72057594046678944 2025-09-25T16:19:37.531781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 135 tablet: 72057594046678944 2025-09-25T16:19:37.531816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-09-25T16:19:37.531821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:37.531826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:187: [IncrementalRestore] No more incremental backups to process 2025-09-25T16:19:37.532597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 135 2025-09-25T16:19:37.542819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 135 tablet: 72057594046678944 2025-09-25T16:19:37.542877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 135 tablet: 72057594046678944 2025-09-25T16:19:37.542919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-09-25T16:19:37.542924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:37.542942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:187: [IncrementalRestore] No more incremental backups to process 2025-09-25T16:19:37.543777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 135 2025-09-25T16:19:37.564167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 136 tablet: 72057594046678944 2025-09-25T16:19:37.564214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 136 tablet: 72057594046678944 2025-09-25T16:19:37.564249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=3, IncrementalBackups.size()=3 2025-09-25T16:19:37.564255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:37.564260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:187: [IncrementalRestore] No more incremental backups to process 2025-09-25T16:19:37.565018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 136 
2025-09-25T16:19:37.575229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 136 tablet: 72057594046678944 2025-09-25T16:19:37.575270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 136 tablet: 72057594046678944 2025-09-25T16:19:37.575305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=3, IncrementalBackups.size()=3 2025-09-25T16:19:37.575311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-09-25T16:19:37.575316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:187: [IncrementalRestore] No more incremental backups to process 2025-09-25T16:19:37.576092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 136 === PHASE 4: Selective FORGET Testing === === PHASE 5: Complete Cleanup Verification === Forgetting 5 remaining operations... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:37.589589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.589608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.589612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.589616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.589621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.589623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.589629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.589640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.589736Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.589787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.602901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:37.602923Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.606408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.606480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.606505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.608157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.608210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.608306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.608370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.608787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.608836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.609031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.609038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.609051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.609056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.609061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.609085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.610245Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.625334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:19:37.625411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.625458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.625464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.625495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.625509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.626132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.626199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.626209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.626213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.626529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.626771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.626781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.626786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.627222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.627532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.627569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.627714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.627732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.627737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.627777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:37.627784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.627811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.627821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:37.628154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.628160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:37.770726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:37.770734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:37.770738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:37.770743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-09-25T16:19:37.770747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-09-25T16:19:37.770755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-09-25T16:19:37.771542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-09-25T16:19:37.771559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.771629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:37.771655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/5 2025-09-25T16:19:37.771659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-09-25T16:19:37.771664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/5 2025-09-25T16:19:37.771668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-09-25T16:19:37.771672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-09-25T16:19:37.772016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:37.772078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is 
done id#102:4 progress is 3/5 2025-09-25T16:19:37.772082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-09-25T16:19:37.772087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:4 progress is 3/5 2025-09-25T16:19:37.772090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-09-25T16:19:37.772094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-09-25T16:19:37.772192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:37.772237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 4/5 2025-09-25T16:19:37.772241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-09-25T16:19:37.772246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 4/5 2025-09-25T16:19:37.772249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-09-25T16:19:37.772253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-09-25T16:19:37.772299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.772399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:37.772415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 5/5 2025-09-25T16:19:37.772418Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:37.772423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 5/5 2025-09-25T16:19:37.772427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:37.772431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-09-25T16:19:37.772442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:457:2402] message: TxId: 102 2025-09-25T16:19:37.772448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:37.772453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:37.772458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:37.772474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:37.772479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-09-25T16:19:37.772483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:1 2025-09-25T16:19:37.772488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:19:37.772492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-09-25T16:19:37.772495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:2 2025-09-25T16:19:37.772503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:37.772507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-09-25T16:19:37.772513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:3 2025-09-25T16:19:37.772520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:19:37.772524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:4 2025-09-25T16:19:37.772527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:4 2025-09-25T16:19:37.772533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-09-25T16:19:37.772650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 102 2025-09-25T16:19:37.772660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.772718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:37.773237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:37.773248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:603:2540] TestWaitNotification: OK eventTxId 102 >> TFulltextIndexTests::CreateTablePrefix >> TVectorIndexTests::CreateTablePrefixInvalidKeyType |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTablePrefix [GOOD] >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] >> TFulltextIndexTests::CreateTableNotText >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:37.866997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.867015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.867019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.867023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.867027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.867030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.867036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.867046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.867131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.867177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.878548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:37.878573Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.883165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.883273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.883329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.885155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.885216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.885318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.885388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.885838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.885881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.886122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.886132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.886154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.886162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.886168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: 
TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.886202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.887575Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.909760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.909853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.909916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.909925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.909967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.909983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.910806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.910864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.910912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.910924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.910931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.910936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.911402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.911416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-09-25T16:19:37.911421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.911820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.911831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.911838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.911846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.912532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.912976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.913019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.913228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.913254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:37.913261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.913321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:37.913329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.913360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:37.913373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:37.913868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.913878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:38.062641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:38.062646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:38.062649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:38.062651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-09-25T16:19:38.062654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-09-25T16:19:38.062658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-09-25T16:19:38.063295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:38.063368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/5 2025-09-25T16:19:38.063371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-09-25T16:19:38.063375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/5 2025-09-25T16:19:38.063377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-09-25T16:19:38.063380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-09-25T16:19:38.063670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, 
LocalPathId: 6] was 3 2025-09-25T16:19:38.063717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:4 progress is 3/5 2025-09-25T16:19:38.063720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-09-25T16:19:38.063723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:4 progress is 3/5 2025-09-25T16:19:38.063725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-09-25T16:19:38.063728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-09-25T16:19:38.063799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:38.063826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 4/5 2025-09-25T16:19:38.063829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-09-25T16:19:38.063832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 4/5 2025-09-25T16:19:38.063834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-09-25T16:19:38.063836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-09-25T16:19:38.063883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.063899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.063903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.063906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.063929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.063949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:38.063958Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 5/5 2025-09-25T16:19:38.063960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:38.063963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 5/5 2025-09-25T16:19:38.063965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:38.063968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-09-25T16:19:38.063978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:457:2402] message: TxId: 102 2025-09-25T16:19:38.063981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-09-25T16:19:38.063985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:38.063988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:38.064000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:38.064004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-09-25T16:19:38.064006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:1 2025-09-25T16:19:38.064010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:19:38.064012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-09-25T16:19:38.064015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:2 2025-09-25T16:19:38.064020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:38.064022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-09-25T16:19:38.064026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:3 2025-09-25T16:19:38.064030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:19:38.064033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:4 2025-09-25T16:19:38.064035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:4 2025-09-25T16:19:38.064039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 
2025-09-25T16:19:38.064114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:38.064568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:38.064578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:603:2540] TestWaitNotification: OK eventTxId 102 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableNotText [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:38.700700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:38.700727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.700733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:38.700738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:38.700745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:38.700750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:38.700760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.700775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:38.700935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:38.701011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:38.718334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:38.718356Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:38.722927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:38.723029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:38.723073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:38.724899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:38.724962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:38.725074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.725157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:38.725625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.725675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:38.725948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.725960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.725984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:38.725994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.726001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:38.726039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-09-25T16:19:38.727564Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:38.752131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.752207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.752259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:38.752266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:38.752305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:38.752321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:38.752932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.752985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:38.753029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.753040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:38.753046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:38.753052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:38.753527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.753541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.753546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:38.753938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.753951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.753957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.753965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.754742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:38.755205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:38.755289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:38.755488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.755514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.755522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.755586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:38.755594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.755623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.755636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:38.756129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.756139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.756181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.756188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:38.756283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.756293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:38.756308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.756314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.756320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.756327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.756332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:38.756339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.756345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:38.756350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:38.756363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:38.756371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:38.756376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:38.756779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.756798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.756804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:38.756811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2025-09-25T16:19:38.756817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.756861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:38.757607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:38.757707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:38.758164Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:38.758383Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:38.759183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.759289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:38.759354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-09-25T16:19:38.759362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-09-25T16:19:38.759587Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:38.760321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.760376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-09-25T16:19:38.760442Z node 1 
:TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:38.760488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:38.760496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:38.760549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:38.760569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:38.760575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:38.760653Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:38.760683Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 36us result status StatusPathDoesNotExist 2025-09-25T16:19:38.760723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:38.577333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:38.577353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-09-25T16:19:38.577357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:38.577361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:38.577366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:38.577369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:38.577375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.577386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:38.577473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:38.577527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:38.589362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:38.589381Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:38.593550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:38.593618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:38.593643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:38.595699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:38.595755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:38.595831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.595882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:38.596377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:38.596728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-09-25T16:19:38.596772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.596779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:38.596811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.598351Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:38.613862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.613946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.613997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:38.614003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:38.614036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:38.614047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:38.614737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.614785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:38.614827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.614838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:38.614844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:38.614849Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:38.615326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.615335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.615342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:38.615651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.615658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.615662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.615667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.616320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:38.616727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:38.616767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:38.616961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.616989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.616997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.617062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:38.617071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.617102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.617114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:38.617782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.617807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.617851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.617861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:38.617932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.617938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:38.617949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.617952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.617956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.617958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.617963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:38.617967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.617970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:38.617973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:38.617983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:38.617989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:38.617992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:38.618288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.618305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.618310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:38.618316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:38.618322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.618333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:38.619120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:38.619219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:38.619631Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:38.619869Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:38.620671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "another" KeyColumnNames: "text" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT columns { column: "text" analyzers { tokenizer: STANDARD use_filter_lowercase: true } } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.620748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:38.620802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: columns [ text ] should be [ another text ], at schemeshard: 72057594046678944 2025-09-25T16:19:38.620810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: columns [ text ] should be [ another text ], at schemeshard: 72057594046678944 2025-09-25T16:19:38.621008Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:38.621974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "columns [ text ] should be [ another text ]" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.622024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: columns [ text ] should be [ another text ], operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:38.622084Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:38.622133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:38.622139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:38.622197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:38.622212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:38.622216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:38.622286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:38.622318Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 42us result status StatusPathDoesNotExist 2025-09-25T16:19:38.622353Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableNotText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:38.777348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:38.777366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.777369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:38.777373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:38.777377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:38.777380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:38.777386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.777395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:38.777489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:38.777535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:38.792612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:38.792640Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:38.797167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:38.797275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:38.797314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:38.799052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:38.799112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:38.799229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.799300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-09-25T16:19:38.799757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.799805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:38.800052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.800065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.800090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:38.800098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.800105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:38.800137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.801595Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:38.818675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.818742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.818784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:38.818789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:38.818816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:38.818826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:38.819414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.819449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:38.819483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.819489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:38.819492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:38.819496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:38.819788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.819795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.819801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:38.820151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.820164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.820170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.820177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.820714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:38.821135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:38.821178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:38.821313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.821330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-09-25T16:19:38.821335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.821376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:38.821381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.821402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.821409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:38.821881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.821893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.821945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.821953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:38.822020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.822026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:38.822036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.822039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.822043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:38.822045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.822049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:38.822053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.822056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:38.822059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:38.822070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:38.822075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:38.822078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:38.822320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.822332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:38.822335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:38.822339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:38.822342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:38.822352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:38.822987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:38.823058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:38.823405Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:38.823569Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:38.824077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text" Type: "Uint64" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "text" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT columns { column: "text" analyzers { tokenizer: STANDARD use_filter_lowercase: true } } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.824132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 
2025-09-25T16:19:38.824166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Fulltext column 'text' expected type 'String' but got Uint64, at schemeshard: 72057594046678944 2025-09-25T16:19:38.824171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Fulltext column 'text' expected type 'String' but got Uint64, at schemeshard: 72057594046678944 2025-09-25T16:19:38.824314Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:38.824895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Fulltext column \'text\' expected type \'String\' but got Uint64" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.824930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Fulltext column 'text' expected type 'String' but got Uint64, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:38.824970Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:38.824998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:38.825003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:38.825038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:38.825050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:38.825054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:38.825108Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:38.825129Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 26us result status StatusPathDoesNotExist 2025-09-25T16:19:38.825154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 
LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableColumnsMismatch >> TVectorIndexTests::CreateTableCoveredEmbedding |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableColumnsMismatch [GOOD] >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> TUniqueIndexTests::CreateTable |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] >> TVectorIndexTests::CreateTableWithError [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] >> TFulltextIndexTests::CreateTable |82.2%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] >> TUniqueIndexTests::CreateTable [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableColumnsMismatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:39.685144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:39.685167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.685172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:39.685177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default 
configuration 2025-09-25T16:19:39.685183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:39.685188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:39.685196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.685208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:39.685325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:39.685390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:39.700957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:39.700981Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:39.705223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:39.705309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:39.705344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:39.706991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:39.707043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:39.707137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.707201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:39.707627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.707671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:39.707899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:39.707909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.707928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:39.707935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:39.707941Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:39.707972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.709305Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:39.724689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.724763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.724807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:39.724814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:39.724866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:39.724879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:39.725533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.725571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:39.725604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.725611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:39.725614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:39.725618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:39.725974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.725982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:39.725988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:39.726364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.726373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.726377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.726382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.726881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:39.727355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:39.727415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:39.727607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.727634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.727641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.727697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:39.727706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.727733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:39.727744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:39.728260Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:39.728269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:39.728309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.728318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:39.728401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.728409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:39.728421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:39.728426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.728432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:39.728435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.728440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:39.728447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.728452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:39.728456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:39.728467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:39.728473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:39.728478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:39.728799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:39.728815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:39.728820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 
72057594046678944, txId: 1 2025-09-25T16:19:39.728852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:39.728857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:39.728871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:39.730048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:39.730146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:39.730539Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:39.730760Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:39.731518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "text" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT columns { column: "text_wrong" analyzers { tokenizer: STANDARD use_filter_lowercase: true } } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.731599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:39.731646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: columns [ text_wrong ] should be [ text ], at schemeshard: 72057594046678944 2025-09-25T16:19:39.731653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: columns [ text_wrong ] should be [ text ], at schemeshard: 72057594046678944 2025-09-25T16:19:39.731848Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:39.732524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "columns [ text_wrong ] should be [ text ]" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.732568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: 
columns [ text_wrong ] should be [ text ], operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:39.732627Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:39.732666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:39.732672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:39.732721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:39.732738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:39.732743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:39.732816Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:39.732865Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 54us result status StatusPathDoesNotExist 2025-09-25T16:19:39.732899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:39.687628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:39.687648Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.687654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:39.687659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:39.687665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:39.687669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:39.687678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.687690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:39.687812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:39.687874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:39.704109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:39.704130Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:39.708148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:39.708240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:39.708268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:39.709884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:39.709937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:39.710031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.710094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:39.710605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.710657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:39.710919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:39.710930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-09-25T16:19:39.710951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:39.710958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:39.710965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:39.710995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.712373Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:39.733605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.733674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.733722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:39.733729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:39.733766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:39.733779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:39.734363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.734408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:39.734446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.734455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:39.734460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 
ProgressState no shards to create, do next state 2025-09-25T16:19:39.734465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:39.734864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.734876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:39.734881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:39.735230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.735240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.735246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.735253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.735895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:39.736285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:39.736332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:39.736512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.736536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.736543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.736595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:39.736602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.736628Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:39.736638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:39.737118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:39.737127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ts: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 
MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.856653Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:39.856682Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 32us result status StatusSuccess 2025-09-25T16:19:39.856773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.856865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:39.856886Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 24us result status StatusSuccess 2025-09-25T16:19:39.856949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: 
"__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:39.891037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:39.891055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.891059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:39.891062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:39.891067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:39.891069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:39.891076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:39.891086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:39.891175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:39.891232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:39.903393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:39.903415Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:39.906531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:39.906590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:39.906614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:39.908045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:39.908103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:39.908194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.908260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:39.908769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.908817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:39.909081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:39.909090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.909106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:39.909114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:39.909121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 
2025-09-25T16:19:39.909146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.910472Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:39.929250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.929341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.929403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:39.929411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:39.929451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:39.929466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:39.930235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.930289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:39.930340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.930351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:39.930356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:39.930362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:39.930807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.930821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:39.930826Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:39.931238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.931253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.931260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.931268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.931975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:39.932422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:39.932490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:39.932715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:39.932741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.932750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.932812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:39.932850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:39.932887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:39.932900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:39.933378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-09-25T16:19:39.933388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:39.933437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:39.933444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:39.933543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:39.933552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:39.933566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:39.933572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.933577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:39.933581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.933586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:39.933593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:39.933598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:39.933602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:39.933613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:39.933620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:39.933624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:39.933988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:39.934007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:39.934012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:39.934018Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:39.934023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:39.934036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:39.934769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:39.934879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:39.935329Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:39.935571Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:39.936343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.936430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:39.936457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: levels should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:39.936463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: levels should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:39.936681Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:39.937384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "levels should be set" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.937434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: levels should be set, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-09-25T16:19:39.937493Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 
txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-09-25T16:19:39.938225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:39.938284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:39.938302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: levels should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:39.938308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: levels should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:39.938767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "levels should be set" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:39.938805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: levels should be set, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> TVectorIndexTests::CreateTableMultiColumn >> TFulltextIndexTests::CreateTable [GOOD] >> TVectorIndexTests::CreateTable |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:40.057201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.057232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.057239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.057245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.057253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.057258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.057269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.057284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.057418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.057494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.075348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:40.075377Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.079916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.080016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.080060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.081634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.081693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.081797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.081875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.082257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.082301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.082570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.082580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.082604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.082613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.082620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.082656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.084011Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.109218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.109315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.109376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.109385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.109428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.109444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.110235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.110289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.110338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.110348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.110355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.110361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.110808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-09-25T16:19:40.110819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.110827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.111258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.111268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.111274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.111282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.112084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.112525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.112574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.112781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.112806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.112814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.112889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:40.112898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.112930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:40.112942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:40.113391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.113400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.207573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.207576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:40.207579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 129 -> 240 2025-09-25T16:19:40.207737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:40.208300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:40.208324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:40.208338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:19:40.208368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2025-09-25T16:19:40.208448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:40.208452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:40.208455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:2 progress is 2/3 2025-09-25T16:19:40.208457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-09-25T16:19:40.208460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-09-25T16:19:40.208487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.208490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:19:40.208495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:40.208497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:40.208500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-09-25T16:19:40.208502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:40.208506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-09-25T16:19:40.208520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:383:2350] message: TxId: 101 2025-09-25T16:19:40.208525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-09-25T16:19:40.208532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:19:40.208537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:19:40.208555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:40.208560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:1 2025-09-25T16:19:40.208562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:1 2025-09-25T16:19:40.208566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:40.208568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:2 2025-09-25T16:19:40.208570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:2 2025-09-25T16:19:40.208575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:40.209172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:40.209183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:384:2351] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:40.209280Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:40.209328Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 56us result status StatusSuccess 2025-09-25T16:19:40.209492Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } 
Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:18:19.150869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:18:19.150897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:19.150903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:18:19.150908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: 
OperationsProcessing config: using default configuration 2025-09-25T16:18:19.150914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:18:19.150918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:18:19.150927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:18:19.150941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:18:19.151066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:18:19.151138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:18:19.186407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:18:19.186455Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:19.186602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:18:19.193671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:18:19.193820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:18:19.193873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:18:19.195843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:18:19.195936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:18:19.196063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:19.196305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:18:19.197716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:19.197783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:18:19.206924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:18:19.206955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:18:19.206978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-09-25T16:18:19.206992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:18:19.206998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:18:19.207044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:18:19.209523Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:18:19.230286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:18:19.230374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:19.230448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:18:19.230458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:18:19.230522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:18:19.230542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:18:19.231506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:19.231570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:18:19.231627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:19.231639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:18:19.231646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:18:19.231653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:18:19.232137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:19.232151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:18:19.232157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:18:19.232984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:19.232999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:18:19.233006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:18:19.233014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:18:19.233716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:18:19.234190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:18:19.234258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:18:19.234523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:18:19.234555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
2025-09-25T16:19:38.595055Z node 213 :HIVE INFO: tablet_helpers.cpp:1481: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 4 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 1007 2025-09-25T16:19:38.595076Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6409: Update domain reply, message: Origin: 72075186233409546 TxId: 1007, at schemeshard: 72057594046678944 2025-09-25T16:19:38.595081Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1007, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:38.595101Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1007:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1007 2025-09-25T16:19:38.595110Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:796: [72057594046678944] TSyncHive, operationId 1007:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-09-25T16:19:38.595117Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1007:0 138 -> 240 2025-09-25T16:19:38.595555Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1007:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.595587Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1007:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.595595Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1007:0 ProgressState 2025-09-25T16:19:38.595612Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1007:0 progress is 1/1 2025-09-25T16:19:38.595618Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-09-25T16:19:38.595624Z node 213 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1007:0 progress is 1/1 2025-09-25T16:19:38.595628Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-09-25T16:19:38.595633Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1007, ready parts: 1/1, is published: true 2025-09-25T16:19:38.595639Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-09-25T16:19:38.595645Z node 213 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1007:0 2025-09-25T16:19:38.595650Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1007:0 2025-09-25T16:19:38.595663Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1007 2025-09-25T16:19:38.596142Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1007: send EvNotifyTxCompletion 2025-09-25T16:19:38.596155Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 1007 2025-09-25T16:19:38.596253Z node 213 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1007, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596273Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1007: got EvNotifyTxCompletionResult 2025-09-25T16:19:38.596279Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1007: satisfy waiter [213:874:2761] TestWaitNotification: OK eventTxId 1007 2025-09-25T16:19:38.596389Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596436Z node 213 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 56us result status StatusSuccess 2025-09-25T16:19:38.596541Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596633Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596652Z node 213 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 23us result status StatusSuccess 2025-09-25T16:19:38.596711Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:38.596780Z node 213 :HIVE INFO: tablet_helpers.cpp:1498: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:4 2025-09-25T16:19:38.596873Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-09-25T16:19:38.596897Z node 213 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 25us result status StatusSuccess 2025-09-25T16:19:38.596953Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 4 
ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:40.235521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.235546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.235552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.235557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.235563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.235568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.235576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.235588Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.235703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.235766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.251358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:40.251380Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.255416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.255507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.255541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.257256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.257311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.257401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.257462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.257894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.257935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.258157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.258167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.258187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.258193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.258199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.258227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.259572Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.280341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.280421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.280475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.280483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.280519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.280532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.281221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.281314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.281328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.281333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.281719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.282078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.282088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:40.282093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.282100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.282707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.283099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.283147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.283335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.283357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.283364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.283413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:40.283420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.283446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:40.283457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:40.283894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.283901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Reboot SchemeShard.. 2025-09-25T16:19:40.468143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:463:2419] sender: [1:531:2058] recipient: [1:107:2141] Leader for TabletID 72057594046678944 is [1:463:2419] sender: [1:534:2058] recipient: [1:533:2473] Leader for TabletID 72057594046678944 is [1:535:2474] sender: [1:536:2058] recipient: [1:533:2473] 2025-09-25T16:19:40.477726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.477760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.477767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.477773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.477780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.477785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.477794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.477810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.477933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.477985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.479735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.480168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.480210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.480229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:40.480236Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.480281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.480402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1417: TTxInit for Paths, read records: 4, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: texts, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:19:40.480429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: texts, child name: idx_fulltext, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:19:40.480436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: idx_fulltext, child name: indexImplTable, child id: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-09-25T16:19:40.480447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1491: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1517: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1819: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:19:40.480582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2025-09-25T16:19:40.480597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-09-25T16:19:40.480646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2090: TTxInit for Columns, read records: 7, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2150: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2208: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2025-09-25T16:19:40.480701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:19:40.480723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2294: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2360: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2510: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2889: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:19:40.480904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2968: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3469: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.480977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3505: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3738: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3883: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3900: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3917: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4077: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4378: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4723: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4843: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4932: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.481197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4959: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 
2025-09-25T16:19:40.481204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4986: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.482341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.482885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.482899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.483115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.483127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.483134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.483799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:40.613675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.613698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.613703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.613707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.613712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.613715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.613721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.613730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.613828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.613898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.626479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:40.626500Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.630002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.630072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.630104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.631529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.631576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.631654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.631714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.632330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.632377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.632618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.632627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.632644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.632650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.632654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.632681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.633948Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.652539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.652627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.652687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.652696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.652738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.652753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.653498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.653554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.653606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.653617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.653624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.653629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.654051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.654061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.654069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.654604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.654622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.654630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-09-25T16:19:40.654638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.655371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.655890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.655933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.656138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.656163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.656172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.656239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:40.656246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.656279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:40.656292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:40.656819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.656847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
taColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 10 levels: 3 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.781789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:40.781819Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 32us result status StatusSuccess 2025-09-25T16:19:40.781897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.781957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:40.781973Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 17us result status StatusSuccess 2025-09-25T16:19:40.782018Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 
0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:40.768216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.768237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.768243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2025-09-25T16:19:40.768249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.768255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.768260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.768269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.768283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.768409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.768471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.786598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:40.786626Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.791348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.791467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.791506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.793576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.793649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.793765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.793839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.794353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.794409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.794679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.794693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.794718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.794726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.794734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.794768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.796661Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.822027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.822127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.822188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.822197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.822242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.822258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.823022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.823083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.823135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.823146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.823152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.823158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.823622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:40.823638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.823646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.824007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.824018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.824026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.824033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.824732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.825199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.825254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.825458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.825485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:40.825493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.825558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:40.825565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.825596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:40.825607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:40.826094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.826104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... chemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.967787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:40.967803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.967807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.967811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:40.967919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.967926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.967929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:40.967932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-09-25T16:19:40.967934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:40.968326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.968352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:40.968358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:40.968364Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-09-25T16:19:40.968370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:40.968386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-09-25T16:19:40.969018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:40.969122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/4 2025-09-25T16:19:40.969126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-09-25T16:19:40.969131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:3 progress is 2/4 2025-09-25T16:19:40.969135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-09-25T16:19:40.969139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-09-25T16:19:40.969292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:40.969383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 3/4 2025-09-25T16:19:40.969387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-09-25T16:19:40.969391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 3/4 2025-09-25T16:19:40.969395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-09-25T16:19:40.969402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-09-25T16:19:40.969458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.969545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:40.969561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-09-25T16:19:40.969565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:19:40.969569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-09-25T16:19:40.969573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:19:40.969577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-09-25T16:19:40.969589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:417:2373] message: TxId: 102 2025-09-25T16:19:40.969595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:19:40.969600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:40.969605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:40.969621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:40.969627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-09-25T16:19:40.969630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:1 2025-09-25T16:19:40.969635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:40.969639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-09-25T16:19:40.969642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:2 2025-09-25T16:19:40.969649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:40.969654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-09-25T16:19:40.969657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:3 2025-09-25T16:19:40.969665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:19:40.969747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.969774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.970145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:40.970808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:40.970820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:535:2483] TestWaitNotification: OK eventTxId 102 |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] Test command err: 2025-09-25T16:19:32.596272Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:19:32.597138Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:32.602239Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:32.602329Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:32.602657Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:32.602799Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:19:32.602821Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:32.605091Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:19:32.605154Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:19:32.605256Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:19:32.605282Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:32.607016Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:19:32.607190Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:19:32.607222Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:19:32.607250Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:32.626855Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:19:32.670044Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:19:32.670145Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:32.671992Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:32.672112Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:19:32.672121Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:19:32.672132Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:19:32.672137Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:19:32.672170Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:19:32.672192Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:19:32.675135Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/1/pdisk-1.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/2/pdisk-2.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/3/pdisk-3.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/4/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/5/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/6/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/7/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/8/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 
VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 
VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-09-25T16:19:32.753711Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:32.767410Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:32.767486Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:19:34.440275Z node 9 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:19:34.441574Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:34.444070Z node 9 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:19:34.444625Z node 9 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:19:34.444671Z node 9 :CMS DEBUG: console__load_state.cpp:50: Using default config. 2025-09-25T16:19:34.444767Z node 9 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:19:34.445032Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:34.445125Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:34.445646Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:34.445920Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:34.446001Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:34.447257Z node 9 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:19:34.447285Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:19:34.447322Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:19:34.447347Z node 9 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:34.458668Z node 9 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:19:34.501915Z node 9 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:19:34.502033Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:34.502065Z node 9 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:34.502157Z node 9 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:19:34.502164Z node 9 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 
2025-09-25T16:19:34.502173Z node 9 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:19:34.502178Z node 9 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:19:34.502187Z node 9 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ ... uster_info.cpp:782: Adding lock for Host ::1:12002 (30) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.729783Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (29) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.729882Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:39.729904Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:39.729918Z node 29 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:19:39.730054Z node 29 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:19:39.730065Z node 29 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 2025-09-25T16:19:39.730075Z node 29 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730086Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730091Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730095Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730100Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730104Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: TX_ALLOCATOR, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730108Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730113Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730117Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730121Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 31, with state: Up, locked nodes: 2, down nodes: 0 2025-09-25T16:19:39.730125Z node 29 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:19:39.730147Z node 29 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-3, requestId# user-r-3, owner# user 2025-09-25T16:19:39.730153Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12003 (31) 
(permission user-p-3 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.730164Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:19:39.730202Z node 29 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.313536Z, action# Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 2025-09-25T16:19:39.741344Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:19:39.741455Z node 29 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-3" Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } Deadline: 180313536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12003 } } } } 2025-09-25T16:19:39.814972Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12003 (31) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.815002Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (30) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.815014Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (29) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.815113Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:39.815135Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:39.815150Z node 29 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:19:39.815290Z node 29 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:19:39.815302Z node 29 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2025-09-25T16:19:39.815314Z node 29 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815326Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815331Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815336Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815341Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815345Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: 
TX_ALLOCATOR, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815350Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815355Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815359Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815363Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 32, with state: Up, locked nodes: 3, down nodes: 0 2025-09-25T16:19:39.815368Z node 29 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:19:39.815388Z node 29 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-4, requestId# user-r-4, owner# user 2025-09-25T16:19:39.815396Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12004 (32) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.815408Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:19:39.815453Z node 29 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-4, validity# 1970-01-01T00:03:00.415048Z, action# Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2025-09-25T16:19:39.826510Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:19:39.826630Z node 29 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-4" Permissions { Id: "user-p-4" Action { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } Deadline: 180415048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12004 } } } } 2025-09-25T16:19:39.838216Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12003 (31) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.838245Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (30) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.838254Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (29) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.838263Z node 29 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12004 (32) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-09-25T16:19:39.838363Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:39.838385Z node 29 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:39.838398Z node 29 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-09-25T16:19:39.838523Z node 29 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false 
AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:19:39.838534Z node 29 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 2025-09-25T16:19:39.838545Z node 29 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 33, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2025-09-25T16:19:39.838586Z node 29 :CMS DEBUG: node_checkers.cpp:162: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 33, with state: Up, locked nodes: 4, down nodes: 0 2025-09-25T16:19:39.838596Z node 29 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: Cannot lock node '33': tablet 'FLAT_BS_CONTROLLER' has too many unavailable nodes. Locked: 4, down: 0, limit: 4) 2025-09-25T16:19:39.838610Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:19:39.849674Z node 29 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:19:39.849787Z node 29 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'33\': tablet \'FLAT_BS_CONTROLLER\' has too many unavailable nodes. Locked: 4, down: 0, limit: 4" } RequestId: "user-r-5" Deadline: 420516560 } >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-09-25T16:19:32.085864Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062237670254540:2067];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.085883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005064/r3tmp/tmp5cB6Gx/pdisk_1.dat 2025-09-25T16:19:32.134231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.145087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062237670254513:2081] 1758817172085343 != 1758817172085346 2025-09-25T16:19:32.145582Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:23015 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:19:32.198406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.198439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.199373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.201127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.433333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.433351Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.481658Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.485258Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1411: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-09-25T16:19:32.485727Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.485732Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.485740Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.485769Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 2, sender: [1:7554062237670255084:2286], selfId: [1:7554062237670254769:2263], source: [1:7554062237670254769:2263] 2025-09-25T16:19:32.486660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237670255122:2295], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.486724Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1411: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-09-25T16:19:32.486743Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 3, sender: [1:7554062237670255084:2286], selfId: [1:7554062237670254769:2263], source: [1:7554062237670254769:2263] 2025-09-25T16:19:32.486754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.486951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237670255154:2296], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.486980Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1411: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-09-25T16:19:32.486994Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 4, sender: [1:7554062237670255084:2286], selfId: [1:7554062237670254769:2263], source: [1:7554062237670254769:2263] 2025-09-25T16:19:32.487005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.487109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237670255156:2297], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.487117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.489515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237670255158:2298], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.489533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.489691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062237670255160:2299], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:32.489702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:19:33.121655Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:19:33.125618Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-09-25T16:19:33.126695Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:108:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-09-25T16:19:33.126742Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-09-25T16:19:33.126766Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005064/r3tmp/tmpQWA4zW/pdisk_1.dat 2025-09-25T16:19:33.178270Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:33.178318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:33.182936Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:33.183431Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1758817172765792 != 1758817172765796 2025-09-25T16:19:33.214753Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:33.257951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:33.289765Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:100:2147], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-09-25T16:19:33.290074Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [2:100:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-09-25T16:19:33.290089Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: c ... 
R Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BSC_STAT_PROCESSOR Captured TEvents::TSystem::Wakeup to NKikimr::NBsController::TBlobStorageController::TSelfHealActor Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NIcNodeCache::TIcNodeCacheServiceActor Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to (anonymous namespace)::TComputeSchedulerService Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-09-25T16:19:40.130674Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1172: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-09-25T16:19:40.130712Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1180: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=ODNhNGEwMTQtMmI3OWNlMGUtZmE0MjM4MTAtYjI3Mjg3OTI= status: TIMEOUT round: 0 2025-09-25T16:19:40.130762Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=2&id=ODNhNGEwMTQtMmI3OWNlMGUtZmE0MjM4MTAtYjI3Mjg3OTI=, ActorId: [2:1123:2916], ActorState: ExecuteState, TraceId: 01k60trnpza5t4rvh2drxvhfmr, Create QueryResponse for error on request, msg: 2025-09-25T16:19:40.130849Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 20, sender: [2:589:2517], selfId: [2:63:2110], source: [2:1123:2916] Send scheduled evet back 2025-09-25T16:19:40.130880Z node 2 :KQP_COMPILE_ACTOR NOTICE: kqp_compile_actor.cpp:581: Compilation timeout, self: [2:1126:2919], cluster: db, database: , text: "SELECT * FROM 
`/Root/Table`;", startTime: 2025-09-25T16:19:39.360051Z 2025-09-25T16:19:40.130908Z node 2 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:404: Send response, self: [2:1126:2919], owner: [2:93:2140], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: f13b4b66-b7a07b7c-7f53ece3-85375e97 Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-09-25T16:19:40.445344Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7554062269723349393:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:40.445375Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005064/r3tmp/tmpatMaCf/pdisk_1.dat 2025-09-25T16:19:40.450880Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:40.473331Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21337, node 3 2025-09-25T16:19:40.487855Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:40.487866Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:40.487867Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:40.487903Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:40.512036Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:19:40.546902Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:40.546928Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:40.548478Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:40.668352Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:19:40.668363Z node 3 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-09-25T16:19:40.790407Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:40.794112Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:40.794128Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:40.794136Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart >> TRestoreTests::ExportImportDecimalKey >> TImportTests::ShouldWriteBillRecordOnServerlessDb >> TFulltextIndexTests::CreateTableNoColumnsSettings >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> TImportTests::ImportStartTime >> TImportTests::ShouldSucceedOnIndexedTable1 >> TImportTests::Changefeeds >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Raw] >> TFulltextIndexTests::CreateTableNoColumnsSettings [GOOD] >> TImportTests::ImportStartTime [GOOD] >> TImportTests::MultipleViewCreationRetries >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Raw] >> TImportTests::ShouldWriteBillRecordOnServerlessDb [GOOD] >> TImportTests::TablePermissions >> TImportWithRebootsTests::ShouldSucceedOnIndexedTable >> TImportWithRebootsTests::ShouldSucceedOnSimpleTable >> TImportTests::ShouldSucceedOnSingleShardTable >> TImportTests::CancelUponBuildingIndicesShouldSucceed >> TRestoreTests::ExportImportDecimalKey [GOOD] >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Raw] >> TImportTests::Changefeeds [GOOD] >> TImportTests::ChangefeedWithTablePermissions >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFulltextIndexTests::CreateTableNoColumnsSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.505037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.505054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.505059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.505062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.505067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.505070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.505076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.505086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.505172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.505225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.515669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:42.515685Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.518696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.518760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.518785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.520064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.520111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.520175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.520220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.520541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.520572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.520742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.520748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.520763Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.520768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.520772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.520793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.521698Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.535613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.535674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.535716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.535722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.535750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.535761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.536327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.536364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.536397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.536404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.536408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-09-25T16:19:42.536412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.536783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.536797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.536804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.537181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.537190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.537195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.537201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.537717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.538086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.538125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.538263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.538281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.538286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.538326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.538331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.538352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.538361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.538752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.538759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.538794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.538803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:42.538872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.538878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:42.538887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:42.538890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.538894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:42.538896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.538912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:42.538918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.538921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:42.538924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:42.538932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:42.538937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:42.538940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:42.539182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-09-25T16:19:42.539198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:42.539203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:42.539208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:42.539213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.539227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:42.539833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:42.539921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:19:42.540296Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:19:42.540511Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:19:42.541098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "texts" Columns { Name: "id" Type: "Uint64" } Columns { Name: "text" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "another" Type: "Uint64" } KeyColumnNames: "id" } IndexDescription { Name: "idx_fulltext" KeyColumnNames: "text" Type: EIndexTypeGlobalFulltext DataColumnNames: "covered" FulltextIndexDescription { Settings { layout: FLAT } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.541152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:100: TCreateTableIndex construct operation table path: /MyRoot/texts domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-09-25T16:19:42.541166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: columns should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:42.541171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: columns should be set, at schemeshard: 72057594046678944 2025-09-25T16:19:42.541307Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:42.541776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: 
StatusInvalidParameter Reason: "columns should be set" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.541808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: columns should be set, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/texts 2025-09-25T16:19:42.541850Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:19:42.541878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:42.541883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:42.541917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:42.541928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:42.541931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:290:2280] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:42.541987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/texts/idx_fulltext" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:42.542012Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/texts/idx_fulltext" took 30us result status StatusPathDoesNotExist 2025-09-25T16:19:42.542040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/texts/idx_fulltext\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/texts/idx_fulltext" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TImportTests::TablePermissions [GOOD] >> TImportTests::TopicExportImport >> TImportTests::ShouldSucceedOnIndexedTable1 [GOOD] >> TImportTests::ShouldSucceedImportTableWithUniqueIndex >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Raw] >> TCmsTest::StateStorageLockedNodes [GOOD] >> TImportTests::ShouldSucceedOnSingleShardTable [GOOD] >> TImportTests::ShouldSucceedOnMultiShardTable >> TImportTests::NoACLOption >> TImportTests::TopicExportImport [GOOD] >> TImportTests::CancelUponBuildingIndicesShouldSucceed 
[GOOD] >> TImportTests::AuditCompletedImport >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] >> TImportTests::ShouldSucceedImportTableWithUniqueIndex [GOOD] >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex >> TImportTests::ChangefeedWithTablePermissions [GOOD] >> TImportTests::ChangefeedsExportRestore >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Raw] [GOOD] >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Zstd] >> TImportTests::NoACLOption [GOOD] >> TImportTests::ShouldBlockMerge >> TImportTests::ShouldSucceedOnMultiShardTable [GOOD] >> TImportTests::ShouldSucceedWithoutTableProfiles >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] Test command err: 2025-09-25T16:19:32.620377Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:32.621527Z node 1 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:19:32.623371Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:32.623428Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:32.623687Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:32.623718Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:32.623736Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:32.625297Z node 1 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:19:32.625354Z node 1 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:19:32.625381Z node 1 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:19:32.625460Z node 1 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:19:32.625973Z node 1 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:19:32.626001Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:19:32.626030Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:19:32.626056Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:32.648953Z node 1 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:19:32.685794Z node 1 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:19:32.685905Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:32.687599Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:32.687741Z node 1 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:19:32.687750Z node 1 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:19:32.687759Z node 1 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:19:32.687763Z node 1 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:19:32.687774Z node 1 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:19:32.687796Z node 1 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:19:32.689983Z node 1 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-09-25T16:19:32.766179Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:32.777114Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:32.777173Z node 1 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:19:32.807856Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:32.807902Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:32.807976Z node 1 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:05:00Z 2025-09-25T16:19:32.808111Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300110512 } Timestamp: 300110512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300110512 } Timestamp: 300110512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300110512 } Timestamp: 300110512 NodeId: 3 
InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300110512 } Timestamp: 300110512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300110512 } Timestamp: 300110512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Timestamp: 300110512 } } 2025-09-25T16:19:32.808170Z node 1 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:19:32.808180Z node 1 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-09-25T16:19:32.808192Z node 1 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-09-25T16:19:32.808202Z node 1 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Ok 2025-09-25T16:19:32.808206Z node 1 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Ok 2025-09-25T16:19:32.808211Z node 1 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:19:32.808228Z node 1 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-09-25T16:19:32.808237Z node 1 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-09-25T16:19:32.808249Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:19:32.808287Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.110512Z, action# Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-09-25T16:19:32.851000Z node 1 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:32.921828Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:19:32.921942Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } Deadline: 360110512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-09-25T16:19:32.921956Z node 1 :CMS DEBUG: cms.cpp:1092: Schedule cleanup at 1970-01-01T00:08:00.110512Z 2025-09-25T16:19:32.932967Z node 1 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-09-25T16:19:32.933025Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:32.933045Z node 1 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:32.933058Z node 1 :CMS DEBUG: cluster_info.cpp:991: 
Timestamp: 1970-01-01T00:05:00Z 2025-09-25T16:19:32.933109Z node 1 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-09-25T16:19:32.933120Z node 1 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2025-09-25T16:19:32.933133Z node 1 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-09-25T16:19:32.933144Z node 1 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Restart 2025-09-25T16:19:32.933149Z node 1 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Ok 2025-09-25T16:19:32.933154Z node 1 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:19:32.933171Z node 1 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-09-25T16:19:32.933179Z node 1 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-09-25T16:19:32.933189Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-09-25T16:19:32.933227Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.212024Z, action# Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2025-09-25T16:19:32.943987Z node 1 :CMS DEBUG: cms_tx_store_permissions.cpp:137: TTxStorePermissions complete 2025-09-25T16:19:32.944100Z node 1 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } Deadline: 360212024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2025-09-25T16:19:33.901745Z node 6 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:33.902049Z node 6 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:19:33.903504Z node 6 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:33.903613Z node 6 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:33.904137Z node 6 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:33.904288Z node 6 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:33.904394Z node 6 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:33.904951Z node 6 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:19:33.904980Z node 6 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:19:33.905009Z node 6 :CMS DEBUG: 
console__load_state.cpp:50: Using default config. 2025-09-25T16:19:33.905076Z node 6 :CMS DEBUG: console__load_state.cpp:66: TConsole::TT ... 16 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-09-25T16:19:37.030894Z node 16 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-19, reason# explicit remove 2025-09-25T16:19:37.041901Z node 16 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2025-09-25T16:19:37.041976Z node 16 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-19" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-09-25T16:19:37.042151Z node 16 :CMS INFO: cms.cpp:1361: User user is done with permissions user-p-20 2025-09-25T16:19:37.042165Z node 16 :CMS DEBUG: cms.cpp:1384: Resulting status: OK 2025-09-25T16:19:37.042178Z node 16 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-09-25T16:19:37.042206Z node 16 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-20, reason# explicit remove 2025-09-25T16:19:37.053217Z node 16 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2025-09-25T16:19:37.053288Z node 16 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-20" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-09-25T16:19:41.070687Z node 36 :CMS DEBUG: console__init_scheme.cpp:14: TConsole::TTxInitScheme Execute 2025-09-25T16:19:41.074231Z node 36 :CMS DEBUG: console__init_scheme.cpp:23: TConsole::TTxInitScheme Complete 2025-09-25T16:19:41.074568Z node 36 :CMS DEBUG: console__load_state.cpp:28: TConsole::TTxLoadState Execute 2025-09-25T16:19:41.074606Z node 36 :CMS DEBUG: console__load_state.cpp:50: Using default config. 
2025-09-25T16:19:41.074674Z node 36 :CMS DEBUG: console__load_state.cpp:66: TConsole::TTxLoadState Complete 2025-09-25T16:19:41.074817Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:41.076684Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:41.076797Z node 36 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:41.077227Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:41.077392Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:41.077567Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:41.078520Z node 36 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:19:41.078548Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:19:41.078591Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:76: Using default config 2025-09-25T16:19:41.078613Z node 36 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:41.091409Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-09-25T16:19:41.125647Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:19:41.125728Z node 36 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:41.125749Z node 36 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:41.125817Z node 36 :CMS DEBUG: sentinel.cpp:1020: [Sentinel] [Main] UpdateConfig 2025-09-25T16:19:41.125822Z node 36 :CMS DEBUG: sentinel.cpp:965: [Sentinel] [Main] Start ConfigUpdater 2025-09-25T16:19:41.125827Z node 36 :CMS DEBUG: sentinel.cpp:1036: [Sentinel] [Main] UpdateState 2025-09-25T16:19:41.125830Z node 36 :CMS INFO: sentinel.cpp:960: [Sentinel] [Main] StateUpdater was delayed 2025-09-25T16:19:41.125837Z node 36 :CMS DEBUG: sentinel.cpp:524: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-09-25T16:19:41.125847Z node 36 :CMS DEBUG: sentinel.cpp:537: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-09-25T16:19:41.125907Z node 36 :CMS DEBUG: sentinel.cpp:599: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-09-25T16:19:41.201680Z node 36 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-09-25T16:19:41.212537Z node 36 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-09-25T16:19:41.212601Z node 36 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:19:41.213106Z node 36 :CMS INFO: cms.cpp:107: OnTabletDead: 72057594037936128 2025-09-25T16:19:41.213115Z node 36 :CMS DEBUG: cms.cpp:1238: TCms::Cleanup 2025-09-25T16:19:41.214828Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-09-25T16:19:41.215408Z node 36 :CMS DEBUG: 
cms_impl.h:186: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-09-25T16:19:41.215451Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-09-25T16:19:41.215840Z node 36 :CMS DEBUG: cms_tx_init_scheme.cpp:16: TTxInitScheme Execute 2025-09-25T16:19:41.215917Z node 36 :CMS DEBUG: cms_tx_init_scheme.cpp:24: TTxInitScheme Complete 2025-09-25T16:19:41.216036Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:33: TTxLoadState Execute 2025-09-25T16:19:41.216096Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:69: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-09-25T16:19:41.216111Z node 36 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-09-25T16:19:41.216172Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 10031c0c event: NKikimr::TEvNodeWardenStorageConfig 2025-09-25T16:19:41.216229Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-09-25T16:19:41.216259Z node 36 :CMS DEBUG: cms_impl.h:186: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-09-25T16:19:41.341830Z node 36 :CMS DEBUG: cms_tx_load_state.cpp:256: TTxLoadState Complete 2025-09-25T16:19:41.363745Z node 36 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:41.363807Z node 36 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:30Z 2025-09-25T16:19:41.363861Z node 36 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "41" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-09-25T16:19:41.363870Z node 36 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "41" Services: "storage" Duration: 60000000 2025-09-25T16:19:41.363881Z node 36 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 41, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-09-25T16:19:41.363888Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Ok 2025-09-25T16:19:41.363894Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Restart 2025-09-25T16:19:41.363897Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 2; State: Restart 2025-09-25T16:19:41.363900Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 3; State: Locked 2025-09-25T16:19:41.363902Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 4; State: Locked 2025-09-25T16:19:41.363905Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 5; State: Ok 2025-09-25T16:19:41.363907Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 6; State: Ok 2025-09-25T16:19:41.363909Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 7; State: Ok 2025-09-25T16:19:41.363911Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 8; State: Ok 2025-09-25T16:19:41.363917Z node 36 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. 
Maximum allowed number of unavailable rings for this mode: 4) 2025-09-25T16:19:41.363944Z node 36 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "41" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. Maximum allowed number of unavailable rings for this mode: 4" } Deadline: 510216512 } 2025-09-25T16:19:41.374794Z node 36 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-09-25T16:19:41.396461Z node 36 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-09-25T16:19:41.396527Z node 36 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:30Z 2025-09-25T16:19:41.396572Z node 36 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "40" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-09-25T16:19:41.396580Z node 36 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "40" Services: "storage" Duration: 60000000 2025-09-25T16:19:41.396591Z node 36 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 40, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-09-25T16:19:41.396599Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Ok 2025-09-25T16:19:41.396604Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Restart 2025-09-25T16:19:41.396607Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 2; State: Restart 2025-09-25T16:19:41.396610Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 3; State: Locked 2025-09-25T16:19:41.396612Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 4; State: Locked 2025-09-25T16:19:41.396614Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 5; State: Ok 2025-09-25T16:19:41.396617Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 6; State: Ok 2025-09-25T16:19:41.396619Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 7; State: Ok 2025-09-25T16:19:41.396621Z node 36 :CMS DEBUG: cms.cpp:759: Ring: 8; State: Ok 2025-09-25T16:19:41.396624Z node 36 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-09-25T16:19:41.396662Z node 36 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "40" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "40" Services: "storage" Duration: 60000000 } Deadline: 270316512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 40 InterconnectPort: 12005 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::TopicExportImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 
72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.569356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.569378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.569383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.569386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.569391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.569394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.569400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.569410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.569498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.569541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.580600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:42.580623Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.584565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.584675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.584726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.586649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.586704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.586799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.586855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.587392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.587447Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.587740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.587752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.587773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.587782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.587788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.587835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.589206Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.612394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.612458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.612509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.612518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.612597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.612611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.613263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.613307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.613359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.613368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.613374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.613379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.613782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.613793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.613799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.614129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.614138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.614144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.614151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.614881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.615267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.615318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.615522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.615546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.615554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.615635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.615643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.615673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.615684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.616088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.616097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:43.467566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:43.468471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.468582Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.468590Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-09-25T16:19:43.468623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:19:43.468662Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.468668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-09-25T16:19:43.468675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 5 2025-09-25T16:19:43.468697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.468705Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:1 ProgressState 2025-09-25T16:19:43.468719Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:1 progress is 2/2 2025-09-25T16:19:43.468725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-09-25T16:19:43.468731Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:1 progress is 2/2 2025-09-25T16:19:43.468738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-09-25T16:19:43.468744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 2/2, is published: false 2025-09-25T16:19:43.468750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-09-25T16:19:43.468757Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:0 2025-09-25T16:19:43.468762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710759:0 2025-09-25T16:19:43.468773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-09-25T16:19:43.468779Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:1 2025-09-25T16:19:43.468783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710759:1 2025-09-25T16:19:43.468801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-09-25T16:19:43.468807Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710759, publications: 2, subscribers: 1 2025-09-25T16:19:43.468813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710759, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2025-09-25T16:19:43.468817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710759, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-09-25T16:19:43.469112Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.469129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.469149Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710759 2025-09-25T16:19:43.469156Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-09-25T16:19:43.469161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:43.469545Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.469571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.469577Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710759 2025-09-25T16:19:43.469585Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-09-25T16:19:43.469591Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:43.469607Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710759, subscribers: 1 2025-09-25T16:19:43.469613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:127:2152] 2025-09-25T16:19:43.470729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.471094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-09-25T16:19:43.471127Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-09-25T16:19:43.471140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710759 2025-09-25T16:19:43.471150Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:43.471157Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-09-25T16:19:43.472633Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:43.472657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:43.472663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:560:2508] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:43.473725Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored/Topic_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:43.473763Z node 3 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored/Topic_1" took 50us result status StatusSuccess 2025-09-25T16:19:43.473878Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored/Topic_1" PathDescription { Self { Name: "Topic_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic_1" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 10 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 50000000 BurstSize: 50000000 SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } YdbDatabasePath: "/MyRoot" } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TImportTests::MultipleViewCreationRetries [GOOD] >> TImportTests::IgnoreBasicSchemeLimits >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00507f/r3tmp/tmpjEDqiC/pdisk_1.dat 2025-09-25T16:19:32.065435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.065471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.066405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.073182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; 2025-09-25T16:19:32.073527Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.073813Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062235830582820:2081] 1758817172007483 != 1758817172007486 TClient is connected to server localhost:16350 TServer::EnableGrpc on GrpcPort 23980, node 1 2025-09-25T16:19:32.097750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.097763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:32.097777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.097828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:19:32.114493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:32.389593Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.481432Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.481452Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.481463Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.482314Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.482317Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. Creating table 2025-09-25T16:19:32.482327Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.482372Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.482373Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. 
Creating table 2025-09-25T16:19:32.482376Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.482385Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.482387Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.482389Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.483577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.484112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.484383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.486887Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-09-25T16:19:32.486906Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_executions updater. Subscribe on create table tx: 281474976715658 2025-09-25T16:19:32.486934Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-09-25T16:19:32.486936Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2025-09-25T16:19:32.488379Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-09-25T16:19:32.488392Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-09-25T16:19:32.513760Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_executions updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-09-25T16:19:32.519900Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-09-25T16:19:32.525891Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_execution_leases updater. Request: create. 
Transaction completed: 281474976715659. Doublechecking... 2025-09-25T16:19:32.596038Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_executions updater. Column diff is empty, finishing 2025-09-25T16:19:32.596082Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_execution_leases updater. Column diff is empty, finishing 2025-09-25T16:19:32.610990Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table result_sets updater. Column diff is empty, finishing 2025-09-25T16:19:32.611336Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:543: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [1:7554062235830583414:2289] ActorId: [1:7554062235830583688:2456] Database: /dc-1 ExecutionId: e2ad6cce-50343b39-c7113ca7-9245423. Bootstrap. Start TCreateScriptOperationQuery [1:7554062235830583690:2458], RunScriptActorId: [1:7554062235830583689:2457] 2025-09-25T16:19:32.611358Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062235830583688:2456], ActorId: [1:7554062235830583690:2458], TraceId: ExecutionId: e2ad6cce-50343b39-c7113ca7-9245423, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:32.613712Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256536.937914s seconds to be completed 2025-09-25T16:19:32.614518Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=Njc4M2U4OWMtOGZjOGJkZTMtMmM1MTExNDQtNDgwOTVlZTE=, workerId: [1:7554062235830583692:2318], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:32.614565Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.614912Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062235830583688:2456], ActorId: [1:7554062235830583690:2458], TraceId: ExecutionId: e2ad6cce-50343b39-c7113ca7-9245423, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=1&id=Njc4M2U4OWMtOGZjOGJkZTMtMmM1MTExNDQtNDgwOTVlZTE=, TxId: , text: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $lease_state AS Int32; DECLARE $execution_meta_ttl AS Interval; DECLARE $retry_state AS JsonDocument; DECLARE $user_sid AS Text; DECLARE $user_group_sids AS JsonDocument; DECLARE $parameters AS String; DECLARE $graph_compressed AS Optional; DECLARE $graph_compression_method AS Optional; UPSERT INTO `.metadata/script_executions` ( database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at, retry_state, user_token, user_group_sids, parameters, graph_compressed, graph_compression_method, lease_generation ) VALUES ( $database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl, $retry_state, $user_sid, $user_group_sids, $parameters, $graph_compressed, $graph_compression_method, 1 ); UPSERT INTO `.metadata/script_execution_leases` ( database, execution_id, lease_deadline, lease_generation, expire_at, lease_state ) VALUES ( $database, $execution_id, 
CurrentUtcTimes ... xecutions.cpp:2391: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7554062282154401088:3003] ActorId: [3:7554062282154401089:3004] Database: /dc-1 ExecutionId: cab114e3-6dc81863-852b5853-35fa9390. Extracted script execution operation [3:7554062282154401091:3006], Status: SUCCESS, Issues: {
: Error: Previous query retries subissue: {
: Error: Script execution operation failed with code SCHEME_ERROR and will be restarted (RetryCount: 1, Backoff: 5.000000s, RetryRate: 0) at 2025-09-25T16:19:37.837725Z subissue: {
: Error: Type annotation, code: 1030 subissue: {
:1:1: Error: At function: KiReadTable! subissue: {
:1:1: Error: Cannot find table 'db.[/dc-1/test_table]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 } } } } }, Ready: 1, LeaseExpired: 0, RetryRequired: 0, RunScriptActorId: [3:7554062282154400893:2945], LeaseGeneration: 0 2025-09-25T16:19:43.388848Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2410: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7554062282154401088:3003] ActorId: [3:7554062282154401089:3004] Database: /dc-1 ExecutionId: cab114e3-6dc81863-852b5853-35fa9390. Reply success 2025-09-25T16:19:43.389063Z node 3 :KQP_PROXY DEBUG: query_actor.h:291: [TQueryRetryActor] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401116:3012], ActorId: [3:7554062282154401117:3013], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, Starting query actor #1 [3:7554062282154401118:3014] 2025-09-25T16:19:43.389070Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3529: [ScriptExecutions] [TGetScriptExecutionResultActor] OwnerId: [3:7554062282154401115:3011] ActorId: [3:7554062282154401116:3012] Database: /dc-1 ExecutionId: cab114e3-6dc81863-852b5853-35fa9390 ResultSetIndex: 0. Bootstrap. Started TGetScriptExecutionResultQueryActor: [3:7554062282154401117:3013], Offset: 0, RowsLimit: 0, SizeLimit: 0 2025-09-25T16:19:43.389074Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:43.389113Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256526.162508s seconds to be completed 2025-09-25T16:19:43.389482Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, workerId: [3:7554062282154401120:2682], database: /dc-1, longSession: 1, local sessions count: 2 2025-09-25T16:19:43.389512Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:43.389569Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Get results info, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, TxId: , text: -- TGetScriptExecutionResultQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT result_set_metas, operation_status, issues, transient_issues, end_ts, meta FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:43.389644Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 76, targetId: [3:7554062282154401120:2682] 2025-09-25T16:19:43.389654Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 76 timeout: 300.000000s actor id: [3:7554062282154401122:3015] 2025-09-25T16:19:43.437544Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 76, sender: [3:7554062282154401121:2683], selfId: [3:7554062256384595274:2233], source: [3:7554062282154401120:2682] 2025-09-25T16:19:43.437664Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Get results info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, TxId: 2025-09-25T16:19:43.437797Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3371: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Get results info, Fetch results #0 with offset: 0, limit: 0, saved rows: 9223372036854775807 2025-09-25T16:19:43.437827Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:272: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, RunStreamQuery with text: -- TGetScriptExecutionResultQuery::FetchScriptResults DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $offset AS Int64; DECLARE $max_row_id AS Int64; DECLARE $limit AS Uint64; SELECT database, execution_id, result_set_id, row_id, result_set FROM `.metadata/result_sets` WHERE database = $database AND execution_id = $execution_id AND result_set_id = $result_set_id AND row_id >= $offset AND row_id < $max_row_id ORDER BY database, execution_id, result_set_id, row_id LIMIT $limit; 2025-09-25T16:19:43.437882Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:291: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, Start read next stream part 2025-09-25T16:19:43.438327Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k60trspda24papva2xe7rvyb", Created new session, sessionId: ydb://session/3?node_id=3&id=NzFiMjNkOGItYzkxM2FiZjktNzRiYWMzMjUtNDU0YjY5YjY=, workerId: [3:7554062282154401141:2689], database: /dc-1, longSession: 0, local sessions count: 3 2025-09-25T16:19:43.438388Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60trspda24papva2xe7rvyb, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzFiMjNkOGItYzkxM2FiZjktNzRiYWMzMjUtNDU0YjY5YjY=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 77, targetId: [3:7554062282154401141:2689] 2025-09-25T16:19:43.438397Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 77 timeout: 600.000000s actor id: [3:7554062282154401142:3022] 2025-09-25T16:19:43.477161Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:302: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-09-25T16:19:43.477205Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:291: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, Start read next stream part 2025-09-25T16:19:43.477383Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60trspda24papva2xe7rvyb", Forwarded response to sender actor, requestId: 77, sender: [3:7554062282154401139:3014], selfId: [3:7554062256384595274:2233], source: [3:7554062282154401141:2689] 2025-09-25T16:19:43.477418Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:302: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-09-25T16:19:43.477427Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:238: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1758817183522, txId: 281474976710709] shutting down 2025-09-25T16:19:43.477429Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, TxId: 2025-09-25T16:19:43.477434Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3477: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401117:3013], ActorId: [3:7554062282154401118:3014], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, State: Fetch results for offset 0, limit: 0, Successfully fetched 1 rows 2025-09-25T16:19:43.477464Z node 3 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TGetScriptExecutionResultQueryActor] OwnerId: [3:7554062282154401116:3012], ActorId: [3:7554062282154401117:3013], TraceId: ExecutionId: cab114e3-6dc81863-852b5853-35fa9390, RequestDatabase: /dc-1, Got response [3:7554062282154401118:3014] SUCCESS 2025-09-25T16:19:43.477482Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3538: [ScriptExecutions] [TGetScriptExecutionResultActor] OwnerId: [3:7554062282154401115:3011] ActorId: [3:7554062282154401116:3012] Database: /dc-1 ExecutionId: cab114e3-6dc81863-852b5853-35fa9390 ResultSetIndex: 0. 
Finished [3:7554062282154401118:3014], status: SUCCESS, issues: , has more: 0 2025-09-25T16:19:43.477495Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=NzFiMjNkOGItYzkxM2FiZjktNzRiYWMzMjUtNDU0YjY5YjY=, workerId: [3:7554062282154401141:2689], local sessions count: 2 2025-09-25T16:19:43.477625Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=ZTNmNGE0MDEtY2ZmMTg0NjgtNTEyMTM0ZmMtZjBmMzZlMjk=, workerId: [3:7554062282154401120:2682], local sessions count: 1 2025-09-25T16:19:43.478460Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=NjM1YWVlMjktNTA3OTk0NDgtOGEzNGNhNDgtZDlmZmMzZDI=, workerId: [3:7554062256384596325:2433], local sessions count: 0 >> TImportTests::ShouldBlockMerge [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] [GOOD] >> TImportTests::ShouldBlockSplit >> TImportTests::ShouldSucceedWithoutTableProfiles [GOOD] >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex [GOOD] >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.549137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.549156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.549160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.549163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.549169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.549172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.549178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.549187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.549273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.549317Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.559999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:42.560017Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.563144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.563244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.563281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.565313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.565380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.565493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.565558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.566109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.566158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.566432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.566443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.566463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.566470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.566477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.566512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.567823Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.590728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.590791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.590845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.590852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.590927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.590942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.591636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.591682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.591735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.591745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.591751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.591756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.592177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.592189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.592194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.592552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.592563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.592569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.592576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.593274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.593684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.593735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.593923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.593949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.593957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.594039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.594047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.594075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.594087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.594578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.594588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:19:43.876840Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-09-25T16:19:43.887485Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:43.908277Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:43.908293Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:43.908307Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:10678 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B036A944-A6D5-4DE7-BD22-8A6CDF43543D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:19:43.909024Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-09-25T16:19:43.909031Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:19:43.909055Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-09-25T16:19:43.909488Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:43.909500Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:43.909504Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:19:43.909563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 102 TxId: 103 2025-09-25T16:19:43.909569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 103:0, target opId# 102:0 2025-09-25T16:19:43.910769Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.910801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.910808Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.910815Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 133 2025-09-25T16:19:43.911108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.911118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-09-25T16:19:43.911124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.911375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:19:43.911435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:19:43.911440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:19:43.911450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:19:43.911453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:19:43.911499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:19:43.911504Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:43.911507Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:19:43.911526Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:19:43.911535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:43.911541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:435:2403] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:43.922234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:43.922254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:43.922278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:43.922294Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:43.922309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.922315Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.922320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:43.922328Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-09-25T16:19:43.922363Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:43.922768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.922829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.922837Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:43.922851Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:43.922856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:43.922862Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:43.922866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:43.922875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:43.922887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:43.922894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:43.922901Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:43.922906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:43.922928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:43.923311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:43.923322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:435:2403] TestWaitNotification: OK eventTxId 102 >> TImportTests::ChangefeedsExportRestore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:43.357662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:43.357681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.357685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:43.357688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:43.357692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:43.357695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:43.357702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.357713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:43.357801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:43.357848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:43.369946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:43.369964Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:43.373098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:43.373169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:43.373195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:43.374700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:43.374743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:43.374816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.374850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:43.375203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.375233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:43.375414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.375421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.375433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:43.375438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:43.375442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:43.375467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.376579Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.396056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:19:43.396124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.396174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.396182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.396265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.396282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.396891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.396929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.396971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.396980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.396986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.396991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.397325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.397334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.397341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.397694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.397704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.397708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.397712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:19:43.398194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.398478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.398510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.398644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.398661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:43.398666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.398726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:43.398731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.398751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:43.398758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:43.399070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.399076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.255229Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.255234Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.255238Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 16-16 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D5548EE6-7C74-4FD6-B094-8FF16C859DD7 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=16-16 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.255694Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.255709Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.255719Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 17-17 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6CAAE409-7DC6-47A6-A61F-AA9F06B481DB amz-sdk-request: attempt=1 content-type: application/xml range: bytes=17-17 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.256337Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.256352Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.256361Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 18-18 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 731AF162-F8F0-47E0-BD36-7778BEAD453B amz-sdk-request: attempt=1 content-type: application/xml range: bytes=18-18 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.256860Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.256867Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.256875Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# 
/data_00.csv.zst, range# 19-19 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 97C14007-317B-4CE4-8B96-8BB381FAE1AD amz-sdk-request: attempt=1 content-type: application/xml range: bytes=19-19 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.257290Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.257294Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.257298Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 20-20 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3DD59750-F5D6-4A12-8192-B636402EA7DE amz-sdk-request: attempt=1 content-type: application/xml range: bytes=20-20 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.257873Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.257893Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.257903Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 21-21 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22500 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DD9259F6-76DF-44B7-AF79-49E9B8AFAB52 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=21-21 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-09-25T16:19:44.258409Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-09-25T16:19:44.258420Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-09-25T16:19:44.258430Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 0, error# Cannot process data: cannot find new line symbol, writtenBytes# 0, writtenRows# 0 2025-09-25T16:19:44.260906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:44.260932Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:44.260969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:44.260993Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:44.261014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.261021Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.261029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:44.261035Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:44.261071Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:44.261785Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.261909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.261925Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:44.261944Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:44.261953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:44.261963Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:44.261969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:44.261975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:44.261993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:44.262006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:44.262014Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:44.262023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:44.262065Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:44.262686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.262705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedWithoutTableProfiles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:43.152175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:43.152198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.152205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:43.152211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:43.152217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:43.152222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:43.152232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.152247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:43.152374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:43.152430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:43.165510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:43.165529Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:43.168530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:43.168613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:43.168648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:43.170022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:43.170081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:43.170186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.170239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:43.170685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.170729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:43.170991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.171001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.171021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:43.171031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:43.171038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:43.171069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.172472Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.186854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:43.186912Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.186956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.186963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.187023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.187033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.187654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.187686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.187732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.187738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.187742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.187746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.188040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.188047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.188053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.188287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.188294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.188298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.188302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-09-25T16:19:43.188714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.189020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.189060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.189218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.189234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:43.189239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.189301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:43.189306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.189326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:43.189334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:43.189643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.189649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
shard: 72057594046678944 2025-09-25T16:19:44.241692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2214], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 2 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28104 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2744E80A-1091-41D9-9E30-5A4B0851E063 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:19:44.241768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.241778Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710758:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:44.241911Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:19:44.241924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:19:44.241929Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-09-25T16:19:44.241935Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:44.241942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:44.241959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-09-25T16:19:44.241989Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976710758] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: a3ed28bfb53c9214f635c51ed6b618c4 ContentLength: 14 } } 2025-09-25T16:19:44.242921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-09-25T16:19:44.253617Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710758] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710758 2025-09-25T16:19:44.264379Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710758] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 
ChecksumState: DownloadState: } } 2025-09-25T16:19:44.264405Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710758] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:44.264421Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710758] GetObject: key# /data_00.csv, range# 0-13 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28104 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 45299BDF-8E62-4032-9753-22B2FCA324D6 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:19:44.265437Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710758] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a3ed28bfb53c9214f635c51ed6b618c4 Body: 14b } 2025-09-25T16:19:44.265454Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710758] Content size: processed-bytes# 0, content-length# 14, body-size# 14 2025-09-25T16:19:44.265511Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710758] Upload rows: count# 1, size# 34 2025-09-25T16:19:44.265957Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710758] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:44.265966Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710758] Process download info at 'UploadResponse': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:44.265971Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710758] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:19:44.278025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 12884904201 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:44.278051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710758, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:44.278082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 12884904201 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:44.278098Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 336 RawX2: 12884904201 } Origin: 
72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:44.278121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710758:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.278127Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.278133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710758:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:44.278141Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710758:0 129 -> 240 2025-09-25T16:19:44.278192Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:44.278701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.278780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.278789Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2025-09-25T16:19:44.278804Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-09-25T16:19:44.278809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:19:44.278815Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-09-25T16:19:44.278819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:19:44.278824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-09-25T16:19:44.278838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:129:2154] message: TxId: 281474976710758 2025-09-25T16:19:44.278844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:19:44.278851Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-09-25T16:19:44.278856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: 
RemoveTx for txid 281474976710758:0 2025-09-25T16:19:44.278881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:44.279292Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-09-25T16:19:44.279305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710758 2025-09-25T16:19:44.279315Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:44.279321Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-09-25T16:19:44.279695Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:44.279714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.279721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2281] TestWaitNotification: OK eventTxId 101 >> TImportTests::ShouldBlockSplit [GOOD] >> TImportTests::IgnoreBasicSchemeLimits [GOOD] >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Zstd] [GOOD] >> TRestoreTests::ZeroLengthEncryptedFileTreatedAsCorrupted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldBlockSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:43.583491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:43.583514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.583520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:43.583525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:43.583531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:43.583536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:43.583547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.583563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:43.583672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:43.583734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:43.599298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:43.599320Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:43.603487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:43.603569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:43.603604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:43.605260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:43.605318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:43.605431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.605478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:43.606000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.606042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:43.606321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.606331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.606352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:43.606360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:43.606367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:43.606401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.607750Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.624750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:43.624809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.624872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.624880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.624953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.624967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.625474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.625509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.625548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.625557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.625563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.625568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.625932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.625944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.625949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.626269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.626279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.626285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-09-25T16:19:43.626292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:43.626981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.627347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.627377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.627546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.627575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:43.627582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.627651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:43.627656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.627674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:43.627682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:43.628001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.628006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hemeshard__operation_split_merge.cpp:141: Initializing scheme on dst datashard: 72075186233409549 splitOp: 103:0 alterVersion: 1 at tablet: 72057594046678944 2025-09-25T16:19:44.760708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269553152 2025-09-25T16:19:44.760743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269553152 2025-09-25T16:19:44.760757Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409548 2025-09-25T16:19:44.760760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409549 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-09-25T16:19:44.765801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:19:44.765814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:19:44.765883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:19:44.765891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-09-25T16:19:44.765896Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:19:44.788753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409548 2025-09-25T16:19:44.788777Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 103:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 103:0, at schemeshard: 72057594046678944 message# OperationCookie: 103 TabletId: 72075186233409548 2025-09-25T16:19:44.788860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409549 2025-09-25T16:19:44.788869Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 103:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 103:0, at schemeshard: 72057594046678944 message# OperationCookie: 103 TabletId: 72075186233409549 2025-09-25T16:19:44.788876Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 3 -> 131 2025-09-25T16:19:44.789289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.789552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:44.789579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.789587Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:44.789594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 103:0 Starting split on src datashard 72075186233409546 splitOpId# 103:0 at tablet 72057594046678944 2025-09-25T16:19:44.789960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553154 2025-09-25T16:19:44.789987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 2025-09-25T16:19:44.836484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-09-25T16:19:44.836507Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 103:0 HandleReply TEvSplitAck, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-09-25T16:19:44.836593Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 131 -> 132 2025-09-25T16:19:44.836617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-09-25T16:19:44.837050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.837097Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:44.837104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:44.837171Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:44.837178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-09-25T16:19:44.837263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.837268Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:44.837275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186233409546 on partitioning changed splitOp# 103 at tablet 72057594046678944 2025-09-25T16:19:44.837364Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:19:44.837376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:19:44.837381Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-09-25T16:19:44.837387Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-09-25T16:19:44.837396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-09-25T16:19:44.837408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-09-25T16:19:44.837869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553158 2025-09-25T16:19:44.838275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-09-25T16:19:44.838633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-09-25T16:19:44.838643Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 103:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:44.838655Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:44.838659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:44.838662Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:44.838664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:44.838668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-09-25T16:19:44.838677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:455:2407] message: TxId: 103 2025-09-25T16:19:44.838682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:44.838686Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-09-25T16:19:44.838689Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 103:0 2025-09-25T16:19:44.838719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-09-25T16:19:44.838993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.839000Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 103:0 2025-09-25T16:19:44.839030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.839037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:617:2545] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.693743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.693764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.693770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.693775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.693781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.693786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.693795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.693808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.693922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.693974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.706458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 
2025-09-25T16:19:42.706475Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.710069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.710150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.710195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.711735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.711799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.711903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.711955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.712408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.712454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.712709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.712718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.712750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.712757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.712763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.712793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.714126Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.733392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.733451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.733497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.733504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.733562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.733572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.734278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.734327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.734392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.734406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.734411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.734416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.734994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.735010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.735016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.735452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.735462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.735468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.735475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.736081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-09-25T16:19:42.736500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.736553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.736757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.736781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.736788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.736881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.736889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.736948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.736957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.737362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.737370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
tiateTxDone: 1, SnapshotStepId: 5000012, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:44.450835Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-09-25T16:19:44.451050Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710764 Done 2025-09-25T16:19:44.451065Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710764 Done TBuildInfo{ IndexBuildId: 281474976710764, Uid: 104-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 7], IndexType: EIndexTypeGlobalUnique, IndexName: ByValue, IndexColumn: value, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:127:2152], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710765, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710766, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000012, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:44.451071Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710764, subscribers count# 1 2025-09-25T16:19:44.451086Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710764 2025-09-25T16:19:44.451090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710764 2025-09-25T16:19:44.451097Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:44.451101Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710764 2025-09-25T16:19:44.451404Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:44.451420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.451426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:854:2794] TestWaitNotification: OK eventTxId 104 2025-09-25T16:19:44.451528Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:44.451562Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 40us result status StatusSuccess 2025-09-25T16:19:44.451659Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710762 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "ByValue" LocalPathId: 8 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:44.451717Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:19:44.451738Z node 3 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 22us result status StatusSuccess 2025-09-25T16:19:44.451843Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710766 CreateStep: 5000012 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710766 CreateStep: 5000012 ParentPathId: 8 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 8 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TRestoreTests::ExportImportOnSupportedDatatypesWithCommonDestPrefix >> TRestoreTests::ShouldNotDecompressEntirePortionAtOnce >> KqpProxy::DatabasesCacheForServerless [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedsExportRestore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.634061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.634080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.634084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.634087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.634091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.634094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.634101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.634112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.634199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.634243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.644857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:42.644877Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.647822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.647887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.647918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.649266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.649321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.649399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.649440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.649814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.649850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.650059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.650066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.650084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.650089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.650094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.650116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.651064Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.667886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.667956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.668015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.668025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.668103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.668121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.668894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.668936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.668984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.668992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.668996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.669001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.669521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.669537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.669541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.669900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.669908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.669911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.669916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.670384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.670827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.670897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.671110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.671136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.671144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.671227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.671250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.671279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.671291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.671732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.671740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409559 Flags: 2 } ExecLevel: 0 TxId: 281474976710769 MinStep: 5000018 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:44.772754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710769:0 from tablet: 72057594046678944 to tablet: 72075186233409559 cookie: 72057594046678944:14 msg type: 269550082 2025-09-25T16:19:44.772776Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710769:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710769 msg type: 269090816 2025-09-25T16:19:44.772795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710769, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710769 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710769 at step: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72075186233409559 for txId: 281474976710769 at step: 5000018 2025-09-25T16:19:44.772903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710769, tablet: 72075186233409559, partId: 0 2025-09-25T16:19:44.772928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710769:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409559 TxId: 281474976710769 Status: OK 2025-09-25T16:19:44.772938Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:654: NPQState::TPropose operationId# 281474976710769:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409559 TxId: 281474976710769 Status: OK 2025-09-25T16:19:44.772944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 281474976710769:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-09-25T16:19:44.772949Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 281474976710769:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-09-25T16:19:44.773055Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000018, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.773077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710769 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000018 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:44.773085Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 281474976710769:0 HandleReply TEvOperationPlan, step: 5000018, at tablet: 72057594046678944 2025-09-25T16:19:44.773091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 281474976710769:0 can't persist state: ShardsInProgress is not empty, remain: 1 
2025-09-25T16:19:44.773945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710769:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710769 2025-09-25T16:19:44.777220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710769, tablet: 72075186233409559, partId: 0 2025-09-25T16:19:44.777262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710769:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409559 Status: COMPLETE TxId: 281474976710769 Step: 5000018 2025-09-25T16:19:44.777274Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 281474976710769:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409559 Status: COMPLETE TxId: 281474976710769 Step: 5000018 2025-09-25T16:19:44.777288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 281474976710769:0, shardIdx: 72057594046678944:14, shard: 72075186233409559, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.777293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 281474976710769:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-09-25T16:19:44.777337Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710769:0 128 -> 240 2025-09-25T16:19:44.777373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 4 2025-09-25T16:19:44.778092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710769:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.778153Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:44.778162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 17] 2025-09-25T16:19:44.778231Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:44.778238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 17 2025-09-25T16:19:44.778331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710769:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.778340Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710769:0 ProgressState 2025-09-25T16:19:44.778354Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part 
operation is done id#281474976710769:0 progress is 1/1 2025-09-25T16:19:44.778360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-09-25T16:19:44.778365Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710769:0 progress is 1/1 2025-09-25T16:19:44.778369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-09-25T16:19:44.778377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 1/1, is published: false 2025-09-25T16:19:44.778383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-09-25T16:19:44.778389Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710769:0 2025-09-25T16:19:44.778394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710769:0 2025-09-25T16:19:44.778423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 5 2025-09-25T16:19:44.778430Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710769, publications: 1, subscribers: 1 2025-09-25T16:19:44.778434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 17], 3 2025-09-25T16:19:44.778658Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 17 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-09-25T16:19:44.778677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 17 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-09-25T16:19:44.778683Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710769 2025-09-25T16:19:44.778689Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 17], version: 3 2025-09-25T16:19:44.778694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 4 2025-09-25T16:19:44.778711Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710769, subscribers: 1 2025-09-25T16:19:44.778716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:127:2152] 2025-09-25T16:19:44.779946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-09-25T16:19:44.779979Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710769 2025-09-25T16:19:44.779992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710769 2025-09-25T16:19:44.780004Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:44.780010Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710769 2025-09-25T16:19:44.781090Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:44.781167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.781181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1448:3236] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::IgnoreBasicSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:42.640944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.640968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.640974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.640979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.640985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.640989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.640998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.641013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.641127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.641185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.657163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:42.657186Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.661306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.661409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.661461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.663948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.664042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.664167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.664239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.664880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.664936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.665224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.665240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.665259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.665268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.665275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.665307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.666878Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.683281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.683340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-09-25T16:19:42.683384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.683390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.683447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.683461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.684066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.684144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.684156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.684159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.684555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.684883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.684901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.685401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.685735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.685772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.685906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.685925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:42.685932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.685989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:42.685993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.686015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:42.686025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:42.686447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.686457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
operation in-flight, txId: 281474976725764, at schemeshard: 72075186233409546 2025-09-25T16:19:44.789948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725764, ready parts: 0/1, is published: true 2025-09-25T16:19:44.789952Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976725764, at schemeshard: 72075186233409546 2025-09-25T16:19:44.821696Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72075186233409546 2025-09-25T16:19:44.821741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725764 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72075186233409548 TabletID: 72075186233409546, at schemeshard: 72075186233409546 2025-09-25T16:19:44.821751Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409546] TDropLock TPropose opId# 281474976725764:0 HandleReply TEvOperationPlan: step# 500 2025-09-25T16:19:44.821757Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976725764:0 128 -> 240 2025-09-25T16:19:44.822306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976725764:0, at schemeshard: 72075186233409546 2025-09-25T16:19:44.822322Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976725764:0 ProgressState 2025-09-25T16:19:44.822338Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725764:0 progress is 1/1 2025-09-25T16:19:44.822342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725764 ready parts: 1/1 2025-09-25T16:19:44.822346Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725764:0 progress is 1/1 2025-09-25T16:19:44.822349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725764 ready parts: 1/1 2025-09-25T16:19:44.822356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725764, ready parts: 1/1, is published: true 2025-09-25T16:19:44.822369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:502:2451] message: TxId: 281474976725764 2025-09-25T16:19:44.822374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725764 ready parts: 1/1 2025-09-25T16:19:44.822379Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976725764:0 2025-09-25T16:19:44.822383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976725764:0 2025-09-25T16:19:44.822396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 4] was 5 2025-09-25T16:19:44.823024Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976725764 
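
The schemeshard lines of the form "Change state for txid <id>:<part> <from> -> <to>" (2 -> 3, 3 -> 128, 128 -> 240 in the traces above) are the only compact record of each sub-operation's state machine in this dump. A small summarizer like the sketch below, again a hypothetical helper rather than anything shipped with the tests, groups those transitions per transaction part; it assumes the entries have already been re-split as in the previous snippet.

import re
from collections import defaultdict

# Matches e.g. "Change state for txid 281474976725764:0 128 -> 240".
TRANSITION = re.compile(r"Change state for txid (\d+):(\d+) (\d+) -> (\d+)")

def state_histories(entries):
    # (txid, part) -> ordered list of (from_state, to_state) pairs.
    history = defaultdict(list)
    for entry in entries:
        for txid, part, src, dst in TRANSITION.findall(entry):
            history[(txid, part)].append((int(src), int(dst)))
    return history

def looks_complete(transitions):
    # In these particular traces a part that reaches state 240 is then reported
    # as "Part operation is done", so 240 is treated as terminal here; that is
    # an observation about this log, not a documented invariant.
    return bool(transitions) and transitions[-1][1] == 240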
2025-09-25T16:19:44.823043Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976725764 2025-09-25T16:19:44.823060Z node 3 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2417: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976725759, txId# 281474976725764 2025-09-25T16:19:44.823098Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2420: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976725759, Uid: 105-0-0, DomainPathId: [OwnerId: 72075186233409546, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409546, LocalPathId: 4], IndexType: EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:502:2451], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 36 ReadRows: 2 ReadBytes: 36 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725764 2025-09-25T16:19:44.823616Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976725759 Unlocking 2025-09-25T16:19:44.823648Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976725759 Unlocking TBuildInfo{ IndexBuildId: 281474976725759, Uid: 105-0-0, DomainPathId: [OwnerId: 72075186233409546, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409546, LocalPathId: 4], IndexType: EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:502:2451], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 36 ReadRows: 2 ReadBytes: 36 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:44.823661Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-09-25T16:19:44.824047Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976725759 Done 2025-09-25T16:19:44.824075Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976725759 Done TBuildInfo{ IndexBuildId: 281474976725759, Uid: 105-0-0, DomainPathId: [OwnerId: 72075186233409546, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409546, LocalPathId: 4], IndexType: EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:502:2451], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725764, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 36 ReadRows: 2 ReadBytes: 36 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:44.824081Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976725759, subscribers count# 1 2025-09-25T16:19:44.824103Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976725759 2025-09-25T16:19:44.824111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976725759 2025-09-25T16:19:44.824542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-09-25T16:19:44.824556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:683:2598] TestWaitNotification: OK eventTxId 105 2025-09-25T16:19:44.824842Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Alice" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:44.824886Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/Alice" took 74us result status StatusSuccess 2025-09-25T16:19:44.825033Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Alice" PathDescription { Self { Name: "MyRoot/Alice" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "ImportDir" PathId: 3 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725757 CreateStep: 250 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table1" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 
CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "Alice:hdd" Kind: "hdd" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 4 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/Alice" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 4 MaxShardsInPath: 2 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] >> TRestoreTests::ExportImportOnSupportedDatatypes >> TRestoreTests::ShouldNotDecompressEntirePortionAtOnce [GOOD] >> TRestoreTests::ShouldRestoreDefaultValuesFromLiteral >> TImportTests::CompletedImportEndTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-09-25T16:19:32.125150Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062235456028974:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.125234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:32.132614Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062237352658490:2153];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.132914Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7554062236348436154:2076];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.132935Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:32.134408Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:32.135933Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554062235432001481:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.135970Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:32.137751Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7554062236439169117:2085];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.137769Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00509d/r3tmp/tmpr8Tzpp/pdisk_1.dat 2025-09-25T16:19:32.176849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.195391Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.195307Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.198374Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:32.202698Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.203307Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:6709 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
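
Most of the noise in the KqpProxy output above is repeated WARN/ERROR entries: the KQP_PROXY script-execution-table existence checks, the HIVE node-state flaps, and the NET_CLASSIFIER configuration complaints. When triaging a test that finishes [GOOD] but still prints this much, a quick per-component tally such as the one below is often enough to confirm the warnings are the usual startup churn. As before, this is an ad-hoc sketch that assumes the "node <N> :<COMPONENT> <LEVEL>:" entry header, not tooling from the repository.

import re
from collections import Counter

# Pulls the component and severity out of the "node <N> :<COMPONENT> <LEVEL>:" header.
HEADER = re.compile(r"node \d+ :(\w+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):")

def warning_summary(entries):
    counts = Counter()
    for entry in entries:
        m = HEADER.search(entry)
        if m and m.group(2) in ("WARN", "ERROR"):
            counts[(m.group(1), m.group(2))] += 1
    # Highest-volume offenders first, e.g. ("KQP_PROXY", "WARN") or ("HIVE", "WARN").
    return counts.most_common()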
2025-09-25T16:19:32.227673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.227712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.229347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.232983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.234024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.234044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-09-25T16:19:32.235110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.235128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.235269Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:19:32.235497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.236396Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-09-25T16:19:32.236603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.241894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.241917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.242567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.242586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.242754Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-09-25T16:19:32.243050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.244178Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-09-25T16:19:32.244394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:32.361471Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.361489Z node 4 :KQP_PROXY DEBUG: 
kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.372183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.372198Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.527066Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.533702Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.533715Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.533723Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.533911Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.533925Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.533939Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.533963Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.533969Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. Creating table 2025-09-25T16:19:32.533989Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.534033Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.534040Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. Creating table 2025-09-25T16:19:32.534064Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.538536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.538962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.539195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_run ... 
2515]; 2025-09-25T16:19:34.533200Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:34.534731Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:34.534760Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:34.535881Z node 6 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-09-25T16:19:34.536151Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:34.557013Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2025-09-25T16:19:34.587226Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587302Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587323Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587337Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587347Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587357Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587372Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587385Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.587397Z node 7 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-09-25T16:19:34.636357Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:34.636393Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:34.638479Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:34.650632Z node 7 :STATISTICS WARN: tx_init.cpp:288: [72075186224038895] TTxInit::Complete. 
EnableColumnStatistics=false 2025-09-25T16:19:34.650934Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.765235Z node 8 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-09-25T16:19:34.765263Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7554062246100730742:2344], Start check tables existence, number paths: 2 2025-09-25T16:19:34.765326Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-09-25T16:19:34.765341Z node 8 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-09-25T16:19:34.765751Z node 8 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-09-25T16:19:34.765822Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7554062246100730742:2344], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-09-25T16:19:34.765837Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7554062246100730742:2344], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-09-25T16:19:34.765844Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7554062246100730742:2344], Successfully finished 2025-09-25T16:19:34.765860Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 TServer::EnableGrpc on GrpcPort 9798, node 7 2025-09-25T16:19:34.779125Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:34.779137Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:19:34.779138Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:34.779192Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:19:34.785037Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-09-25T16:19:34.801116Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.890135Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7554062244418183020:2526], Database: /Root/test-serverless, Start database fetching 2025-09-25T16:19:34.890215Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7554062244418183020:2526], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-09-25T16:19:34.961621Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: 
[WorkloadService] [Service] Started workload service initialization 2025-09-25T16:19:34.965866Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7554062244418183055:2371], Start check tables existence, number paths: 2 2025-09-25T16:19:34.966122Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-09-25T16:19:34.966132Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-09-25T16:19:34.966314Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7554062244418183055:2371], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-09-25T16:19:34.966329Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7554062244418183055:2371], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-09-25T16:19:34.966340Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7554062244418183055:2371], Successfully finished 2025-09-25T16:19:34.966353Z node 7 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-09-25T16:19:34.966362Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-09-25T16:19:34.986117Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:35.409037Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:35.534979Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-09-25T16:19:38.984303Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7554062241628918935:2087];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:38.984343Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-09-25T16:19:39.407279Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7554062246100730008:2079];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:39.407321Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-09-25T16:19:39.533190Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7554062244418182122:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:39.533230Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-09-25T16:19:44.891243Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2563: SessionId: 
ydb://session/3?node_id=6&id=OWMwMzFmODAtY2M2YmUwODItNjkxMzcwYjYtMzIwNzNkODE=, ActorId: [6:7554062245923887091:2321], ActorState: ReadyState, Session closed due to explicit close event 2025-09-25T16:19:44.891284Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2725: SessionId: ydb://session/3?node_id=6&id=OWMwMzFmODAtY2M2YmUwODItNjkxMzcwYjYtMzIwNzNkODE=, ActorId: [6:7554062245923887091:2321], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-09-25T16:19:44.891288Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2786: SessionId: ydb://session/3?node_id=6&id=OWMwMzFmODAtY2M2YmUwODItNjkxMzcwYjYtMzIwNzNkODE=, ActorId: [6:7554062245923887091:2321], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-09-25T16:19:44.891294Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2798: SessionId: ydb://session/3?node_id=6&id=OWMwMzFmODAtY2M2YmUwODItNjkxMzcwYjYtMzIwNzNkODE=, ActorId: [6:7554062245923887091:2321], ActorState: unknown state, Cleanup temp tables: 0 2025-09-25T16:19:44.891315Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2889: SessionId: ydb://session/3?node_id=6&id=OWMwMzFmODAtY2M2YmUwODItNjkxMzcwYjYtMzIwNzNkODE=, ActorId: [6:7554062245923887091:2321], ActorState: unknown state, Session actor destroyed 2025-09-25T16:19:44.891380Z node 6 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-09-25T16:19:44.891524Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-09-25T16:19:44.891561Z node 6 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-09-25T16:19:44.891596Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2025-09-25T16:19:32.246776Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062235202502175:2150];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:32.246793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/005047/r3tmp/tmpgWW0vg/pdisk_1.dat 2025-09-25T16:19:32.301262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.301433Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062235202502063:2081] 1758817172245821 != 1758817172245824 2025-09-25T16:19:32.304547Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:25553 TServer::EnableGrpc on GrpcPort 8601, node 1 2025-09-25T16:19:32.325944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:32.325960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2025-09-25T16:19:32.325962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:32.326008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:19:32.354907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:32.354936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:32.355648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:32.355900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-09-25T16:19:32.361321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:32.588281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:19:32.588297Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-09-25T16:19:32.678536Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.679280Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-09-25T16:19:32.679283Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-09-25T16:19:32.679288Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 4 2025-09-25T16:19:32.679483Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_executions updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.679486Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_executions updater. 
Creating table 2025-09-25T16:19:32.679494Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-09-25T16:19:32.679518Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.679520Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table script_execution_leases updater. Creating table 2025-09-25T16:19:32.679525Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-09-25T16:19:32.679541Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:148: Table result_sets updater. Describe result: PathErrorUnknown 2025-09-25T16:19:32.679545Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:168: Table result_sets updater. Creating table 2025-09-25T16:19:32.679554Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:101: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-09-25T16:19:32.680541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.680939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.682012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:19:32.683475Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-09-25T16:19:32.683484Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-09-25T16:19:32.683504Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-09-25T16:19:32.683508Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2025-09-25T16:19:32.683841Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-09-25T16:19:32.683844Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:262: Table script_executions updater. 
Subscribe on create table tx: 281474976715658 2025-09-25T16:19:32.703842Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-09-25T16:19:32.710548Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_executions updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-09-25T16:19:32.714501Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:291: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-09-25T16:19:32.774620Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_execution_leases updater. Column diff is empty, finishing 2025-09-25T16:19:32.786900Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table script_executions updater. Column diff is empty, finishing 2025-09-25T16:19:32.794547Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:363: Table result_sets updater. Column diff is empty, finishing 2025-09-25T16:19:32.794688Z node 1 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:29: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Bootstrap 2025-09-25T16:19:32.794704Z node 1 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:82: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do RefreshNodesInfo (WaitRefreshNodes: 0), next refresh after 60.000000s 2025-09-25T16:19:32.794718Z node 1 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:92: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2025-09-25T16:19:32.794722Z node 1 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:102: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.761908s 2025-09-25T16:19:32.794751Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:543: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [1:7554062235202502662:2288] ActorId: [1:7554062235202502937:2456] Database: /dc-1 ExecutionId: d653499b-f5110b88-2224e09f-3cc4b3d6. Bootstrap. Start TCreateScriptOperationQuery [1:7554062235202502942:2458], RunScriptActorId: [1:7554062235202502940:2457] 2025-09-25T16:19:32.794774Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:134: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [1:7554062235202502937:2456], ActorId: [1:7554062235202502942:2458], TraceId: ExecutionId: d653499b-f5110b88-2224e09f-3cc4b3d6, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 0, run create session 2025-09-25T16:19:32.794814Z node 1 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:68: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Handle discover tenant nodes result, number of nodes #1, new RefreshLeasePeriod: 1.000000s 2025-09-25T16:19:32.797220Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444985256536.754403s seconds to be completed 2025-09-25T16:19:32.797912Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=1&id=NTlhOWE2NjgtYzY1NzY3MmYtZTI4NzY5NDctMzk3MDA0Mzg=, workerId: [1:7554062235202502944:2321], database: /dc-1, longSession: 1, local sessions count: 1 2025-09-25T16:19:32.797941Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-09-25T16:19:32.798293Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TCreateScriptOperationQue ... 
RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, TxId: , text: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names, retry_state, graph_compressed FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-09-25T16:19:45.170861Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 41, targetId: [3:7554062291259423978:2521] 2025-09-25T16:19:45.170872Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 41 timeout: 300.000000s actor id: [3:7554062291259423980:2720] 2025-09-25T16:19:45.172677Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 41, sender: [3:7554062291259423979:2522], selfId: [3:7554062278374520686:2179], source: [3:7554062291259423978:2521] 2025-09-25T16:19:45.172766Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, TxId: 01k60trvckdgwjbzr330phfp5r 2025-09-25T16:19:45.172869Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:3986: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, Do finalization with status UNAVAILABLE, exec status: EXEC_STATUS_ABORTED, finalization status (applicate effect: 0): 1, issues: {
: Error: Lease expired }, retry deadline (wait retry: 0): 2025-09-25T16:19:45.172847Z, lease state: 1 2025-09-25T16:19:45.172938Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:200: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, TxId: 01k60trvckdgwjbzr330phfp5r, text: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $stats AS JsonDocument; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; DECLARE $retry_state AS JsonDocument; DECLARE $retry_deadline AS Timestamp; DECLARE $lease_state AS Int32; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, end_ts = CurrentUtcTimestamp(), stats = $stats, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL), retry_state = $retry_state WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-09-25T16:19:45.173077Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 42, targetId: [3:7554062291259423978:2521] 2025-09-25T16:19:45.173085Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 42 timeout: 300.000000s actor id: [3:7554062291259424001:2725] 2025-09-25T16:19:45.175960Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 42, sender: [3:7554062291259424000:2528], selfId: [3:7554062278374520686:2179], source: [3:7554062291259423978:2521] 2025-09-25T16:19:45.176095Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:243: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, TxId: 2025-09-25T16:19:45.176119Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:370: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, TxId: 2025-09-25T16:19:45.176126Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4060: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423975:2518], ActorId: [3:7554062291259423976:2519], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-09-25T16:19:45.176208Z node 3 :KQP_PROXY DEBUG: query_actor.h:310: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [3:7554062291259423974:2517], ActorId: [3:7554062291259423975:2518], TraceId: ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [3:7554062291259423976:2519] SUCCESS 2025-09-25T16:19:45.176229Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1416: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [3:7554062291259423904:2694] ActorId: [3:7554062291259423927:2702] Database: /dc-1 ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a. Successfully finalized script execution operation, WaitingRetry: 0 2025-09-25T16:19:45.176239Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1753: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [3:7554062291259423904:2694] ActorId: [3:7554062291259423927:2702] Database: /dc-1 ExecutionId: 6df057ab-59e56436-e49c1cf7-6b63105a. Reply success 2025-09-25T16:19:45.176257Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4582: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [3:7554062286964456464:2632] ActorId: [3:7554062291259423904:2694]. Lease check #0 [3:7554062291259423930:2704] successfully completed, OperationsToCheck: 0 2025-09-25T16:19:45.176260Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4594: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [3:7554062286964456464:2632] ActorId: [3:7554062291259423904:2694]. Finish, success: 1, issues: 2025-09-25T16:19:45.176274Z node 3 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:76: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2025-09-25T16:19:45.176573Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjdiZmQwMTgtNjcwNjk0MzEtMzNhNjMzMzctMTk4NjFlY2M=, workerId: [3:7554062291259423978:2521], local sessions count: 1 2025-09-25T16:19:45.176928Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60trvc75e3rebrwa211cekh", Forwarded response to sender actor, requestId: 39, sender: [3:7554062291259423946:2507], selfId: [3:7554062278374520686:2179], source: [3:7554062282669489087:2433] 2025-09-25T16:19:45.177810Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: 01k60trvcs7yt5j1p6wkmr8c0x, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZGRlNDg5ZjgtYjVkZDViYzUtN2ZjYWVkMjktOGY1MjM3NTA=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 43, targetId: [3:7554062282669489087:2433] 2025-09-25T16:19:45.177818Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 43 timeout: 300.000000s actor id: [3:7554062291259424033:2733] 2025-09-25T16:19:45.202262Z node 3 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:92: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2025-09-25T16:19:45.202285Z node 3 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:102: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.961329s 2025-09-25T16:19:45.246170Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k60trvcs7yt5j1p6wkmr8c0x", Forwarded response to sender actor, requestId: 43, sender: [3:7554062291259424032:2533], selfId: [3:7554062278374520686:2179], source: [3:7554062282669489087:2433] 2025-09-25T16:19:45.247192Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=3&id=ZGRlNDg5ZjgtYjVkZDViYzUtN2ZjYWVkMjktOGY1MjM3NTA=, workerId: [3:7554062282669489087:2433], local sessions count: 0 >> TRestoreTests::ZeroLengthEncryptedFileTreatedAsCorrupted [GOOD] >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] >> TRestoreTests::ExportImportOnSupportedDatatypesWithCommonDestPrefix [GOOD] >> TRestoreTests::ExportImportPg >> TRestoreTests::ExportImportOnSupportedDatatypes [GOOD] >> TRestoreTests::ExportImportOnSupportedDatatypesEncrypted >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Raw] >> TRestoreTests::ShouldExpandBuffer[Raw] >> TRestoreTests::ShouldRestoreDefaultValuesFromLiteral [GOOD] >> TRestoreTests::ShouldHandleOverloadedShard |82.2%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TRestoreTests::CancelUponProposeResultShouldSucceed[Raw] >> TRestoreTests::ExportImportPg [GOOD] >> TRestoreTests::ExportImportUuid >> TRestoreTests::ShouldExpandBuffer[Raw] [GOOD] >> TRestoreTests::ShouldExpandBuffer[Zstd] >> TImportTests::ShouldNotWriteBillRecordOnCommonDb >> TRestoreTests::ExportImportWithDataChecksumCorruption[Raw] >> TRestoreTests::ExportImportOnSupportedDatatypesEncrypted [GOOD] >> TRestoreTests::ExportImportOnSupportedDatatypesEncryptedNoData >> TImportTests::CancelUponGettingSchemeShouldSucceed >> TRestoreTests::CancelUponProposeResultShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelHungOperationShouldSucceed[Raw] >> TImportTests::CancelUponGettingSchemeShouldSucceed [GOOD] >> TImportTests::CancelUponCreatingTableShouldSucceed >> TRestoreTests::ShouldExpandBuffer[Zstd] [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] >> TImportTests::ShouldNotWriteBillRecordOnCommonDb [GOOD] >> TImportTests::ShouldRestoreAttributes >> TRestoreTests::ExportImportUuid [GOOD] >> TImportTests::ShouldFailOnInvalidSchema >> TRestoreTests::CancelHungOperationShouldSucceed[Raw] [GOOD] >> TImportTests::ShouldRestoreTtlSettingsInDateTypeColumnMode >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] [GOOD] >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] >> TRestoreTests::ExportImportWithDataChecksumCorruption[Raw] [GOOD] >> TImportTests::CancelUponCreatingTableShouldSucceed [GOOD] >> TImportTests::CancelUponTransferringShouldSucceed >> TRestoreTests::ExportImportWithDataChecksumCorruption[Zstd] >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] >> TRestoreTests::ExportImportOnSupportedDatatypesEncryptedNoData [GOOD] >> TRestoreTests::ShouldHandleOverloadedShard [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] [GOOD] >> TImportTests::ShouldRestoreAttributes [GOOD] >> TImportTests::ShouldRestoreAnyAzReadReplicas >> TImportTests::ShouldFailOnInvalidSchema [GOOD] >> TImportTests::ShouldFailOnFileWithoutNewLines >> TImportTests::CancelUponTransferringShouldSucceed [GOOD] >> TImportTests::ShouldRestoreTtlSettingsInDateTypeColumnMode [GOOD] >> TImportTests::ShouldRestoreTtlSettingsInValueSinceUnixEpochMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportUuid [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:45.542822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:45.542853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.542859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2025-09-25T16:19:45.542865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:45.542871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:45.542876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:45.542895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.542910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:45.543028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:45.543092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:45.554531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:45.554549Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:45.558723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:45.558872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:45.558929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:45.561319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:45.561386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:45.561503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.561574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:45.562198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.562253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:45.562548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.562559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.562580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:45.562589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:45.562595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:45.562629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.563980Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:45.587560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:45.587627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.587691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:45.587700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:45.587846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:45.587862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:45.588462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.588509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:45.588563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.588573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:45.588579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:45.588584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:45.588977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:45.588988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:45.588997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:45.589312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.589321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.589327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.589335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:45.590097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:45.590517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:45.590564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:45.590770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.590795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:45.590802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.590884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:45.590892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.590922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:45.590935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:45.591400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.591409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 63 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710763 at step: 5000009 2025-09-25T16:19:47.264702Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.264731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.264741Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TRestore TPropose, opId: 281474976710763:0 HandleReply TEvOperationPlan, stepId: 5000009, at schemeshard: 72057594046678944 2025-09-25T16:19:47.264759Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 128 -> 129 2025-09-25T16:19:47.264787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000009 2025-09-25T16:19:47.269425Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.269439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:19:47.269514Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.269521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-09-25T16:19:47.269625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.269639Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: HEAD /Backup1/data_00.csv HTTP/1.1 HEADERS: Host: localhost:14579 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9B02B76F-EB06-427D-B7C3-034392EC0642 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Backup1/data_00.csv / 39 2025-09-25T16:19:47.269746Z node 3 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:47.269759Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:47.269764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-09-25T16:19:47.269770Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-09-25T16:19:47.269777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:47.269797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true REQUEST: HEAD /Backup1/data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:14579 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D5417B71-61B5-41BD-A85A-16706E1D261B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Backup1/data_00.csv.sha256 / 76 2025-09-25T16:19:47.270694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 REQUEST: GET /Backup1/data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:14579 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0A159429-F4CF-43C0-BA25-AC473767ECCD amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-75 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Backup1/data_00.csv.sha256 / 76 FAKE_COORDINATOR: Erasing txId 281474976710763 REQUEST: GET /Backup1/data_00.csv HTTP/1.1 HEADERS: Host: localhost:14579 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3F9BA251-2C8E-4E47-ABB8-A3DCA4E0ABB6 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-38 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Backup1/data_00.csv / 39 2025-09-25T16:19:47.306576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 759 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:19:47.306599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 
281474976710763, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:47.306618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 759 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:19:47.306628Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 759 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-09-25T16:19:47.306641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.306644Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.306659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:47.306664Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:47.306696Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.307159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.307229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.307236Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:19:47.307247Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:47.307250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.307254Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:47.307256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.307269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:47.307284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710763 2025-09-25T16:19:47.307288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.307292Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:47.307295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:19:47.307312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:47.307700Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:47.307714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:47.308081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.308091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:702:2653] TestWaitNotification: OK eventTxId 103 >> TImportTests::ShouldFailOnFileWithoutNewLines [GOOD] >> TImportTests::ShouldFailOnInvalidPath ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:46.520324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:46.520348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.520354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:46.520360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:46.520367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:46.520371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:46.520381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-09-25T16:19:46.520396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:46.520523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:46.520578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:46.537526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:46.537547Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:46.541895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:46.542008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:46.542047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:46.543851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:46.543919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:46.544033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.544087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:46.544559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.544607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:46.544900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.544914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.544936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:46.544945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:46.544952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:46.544985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.546294Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:46.569459Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:46.569521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.569594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:46.569604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:46.569704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:46.569722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:46.570345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.570386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:46.570435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.570445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:46.570451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:46.570456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:46.570953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.570967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:46.570976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:46.571396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.571409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.571415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.571421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:46.572161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:46.572619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:46.572670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:46.572888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.572917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:46.572925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.573014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:46.573024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.573050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:46.573062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:46.573517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.573527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hemeshard: 72057594046678944 2025-09-25T16:19:47.621442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:47.621511Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.621517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:19:47.621606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.621615Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:4368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C2B4FDE1-7B1E-4E7A-83BD-5F48314DDAF5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:19:47.621730Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:47.621744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:47.621749Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:47.621754Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:47.621761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:47.621776Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:47.621864Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-09-25T16:19:47.621875Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv.zst REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 464468EE-8353-461F-AF83-96B5B3F7C181 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 37 2025-09-25T16:19:47.622491Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: fe451e85ab0310efdda31e730583289f ContentLength: 37 } } 2025-09-25T16:19:47.622564Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:47.643445Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:47.664389Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.664410Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:47.664430Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-36 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4368 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 07B5C402-C320-4B9D-8C21-E2D9DAABA7E4 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-36 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 37 2025-09-25T16:19:47.665240Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: fe451e85ab0310efdda31e730583289f Body: 37b } 2025-09-25T16:19:47.665253Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 37, body-size# 37 2025-09-25T16:19:47.665290Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 2, size# 60 2025-09-25T16:19:47.665787Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.665802Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: fe451e85ab0310efdda31e730583289f 
ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } 2025-09-25T16:19:47.665809Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 16, writtenRows# 2 2025-09-25T16:19:47.677847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:47.677868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:47.677892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:47.677907Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:47.677922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.677929Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.677935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:47.677941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:47.677980Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.678451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.678495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.678501Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:47.678517Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 
progress is 1/1 2025-09-25T16:19:47.678521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.678525Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:47.678527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.678530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:47.678546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:47.678553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.678559Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:47.678565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:47.678592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:47.679051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.679065Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 >> TImportTests::ShouldRestoreAnyAzReadReplicas [GOOD] >> TImportTests::ShouldFailOnInvalidPath [GOOD] >> TRestoreTests::ExportImportWithDataChecksumCorruption[Zstd] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:40.803169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.803190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.803195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.803199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.803204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.803206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.803213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.803239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.803329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.803374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.820005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:40.820036Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.820136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.823769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.823842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.823868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.825055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.825122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.825203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.825383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.826425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.826460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.826646Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.826657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.826691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.826699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.826706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.826740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:40.827913Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.847271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.847335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.847383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.847389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.847432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.847446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.848236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.848287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.848327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.848335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.848339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.848343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.848850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.848863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.848869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.849287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.849296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.849301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.849307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.849820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.850178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.850221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.850373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.850392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 
TabletID: 72057594046678944, ... ve publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:47.589341Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-09-25T16:19:47.589489Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.589495Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.589524Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:19:47.589539Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 3/3 2025-09-25T16:19:47.589542Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:19:47.589545Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 3/3 2025-09-25T16:19:47.589549Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:19:47.589552Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-09-25T16:19:47.589555Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:19:47.589558Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:19:47.589561Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:19:47.589573Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:47.589577Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:1 2025-09-25T16:19:47.589580Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:1 2025-09-25T16:19:47.589583Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:19:47.589586Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:2 2025-09-25T16:19:47.589588Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:2 2025-09-25T16:19:47.589593Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:19:47.589737Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.589955Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.589974Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.589980Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.590327Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.590654Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:19:47.591208Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 356 RawX2: 111669152034 } TabletId: 72075186233409546 State: 4 2025-09-25T16:19:47.591222Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-09-25T16:19:47.591494Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:19:47.591508Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:19:47.591575Z node 26 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-09-25T16:19:47.591609Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-09-25T16:19:47.591653Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:19:47.591726Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:19:47.591730Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:19:47.591737Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:19:47.591743Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-09-25T16:19:47.591748Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:19:47.592983Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-09-25T16:19:47.592998Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-09-25T16:19:47.593045Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-09-25T16:19:47.593087Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:19:47.593093Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-09-25T16:19:47.593241Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:19:47.593262Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.593266Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:637:2561] 2025-09-25T16:19:47.594281Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 360 RawX2: 111669152037 } TabletId: 72075186233409547 State: 4 2025-09-25T16:19:47.594295Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-09-25T16:19:47.594569Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:19:47.594583Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:19:47.594641Z node 26 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-09-25T16:19:47.594671Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.594720Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-09-25T16:19:47.595429Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:19:47.595438Z node 26 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:19:47.595449Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:47.596381Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-09-25T16:19:47.596399Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-09-25T16:19:47.596504Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-09-25T16:19:47.596555Z node 26 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-09-25T16:19:47.596564Z node 26 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportOnSupportedDatatypesEncryptedNoData [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:45.743428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:45.743448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.743452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:45.743456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:45.743460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:45.743464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:45.743469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.743480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-09-25T16:19:45.743569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:45.743613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:45.757039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:45.757056Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:45.760225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:45.760289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:45.760315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:45.761746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:45.761793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:45.761871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.761909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:45.762256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.762286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:45.762469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.762475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.762487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:45.762492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:45.762496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:45.762518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.763465Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:45.778263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:45.778314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.778353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:45.778358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:45.778417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:45.778429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:45.779029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:45.779104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:45.779114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:45.779117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:45.779479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:45.779792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.779858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.779863Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:45.780354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:45.780676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:45.780712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:45.780869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.780891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:45.780896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.780960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:45.780965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.780986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:45.780995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:45.781421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.781431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
08 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710763 at step: 5000009 2025-09-25T16:19:47.478122Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.478149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.478157Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TRestore TPropose, opId: 281474976710763:0 HandleReply TEvOperationPlan, stepId: 5000009, at schemeshard: 72057594046678944 2025-09-25T16:19:47.478191Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 128 -> 129 2025-09-25T16:19:47.478219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000009REQUEST: HEAD /BackupPrefix/001/data_00.csv.enc HTTP/1.1 HEADERS: Host: localhost:27931 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E5E9AF9B-E396-489C-B9E8-37A04550157E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /BackupPrefix/001/data_00.csv.enc / 73 2025-09-25T16:19:47.483203Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.483216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-09-25T16:19:47.483344Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.483354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 6 2025-09-25T16:19:47.483372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.483380Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.483652Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:47.483667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:47.483672Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-09-25T16:19:47.483682Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-09-25T16:19:47.483689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-09-25T16:19:47.483709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true REQUEST: HEAD /BackupPrefix/001/data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:27931 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CB3211C6-B015-40F7-9949-1838ADD9B42A amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /BackupPrefix/001/data_00.csv.sha256 / 76 2025-09-25T16:19:47.484437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 REQUEST: GET /BackupPrefix/001/data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:27931 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5C12975D-5C46-436D-9304-C7D63DF2FE6A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-75 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /BackupPrefix/001/data_00.csv.sha256 / 76 FAKE_COORDINATOR: Erasing txId 281474976710763 REQUEST: GET /BackupPrefix/001/data_00.csv.enc HTTP/1.1 HEADERS: Host: localhost:27931 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD087C16-7CA5-4C52-8C80-D820B880B97D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-72 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /BackupPrefix/001/data_00.csv.enc / 73 2025-09-25T16:19:47.520297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 12884904583 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.520318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:47.520341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 12884904583 } Origin: 72075186233409548 State: 2 TxId: 
281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.520352Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 761 RawX2: 12884904583 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.520365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.520371Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.520375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:47.520382Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:47.520441Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.520970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.521009Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.521015Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:19:47.521026Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:47.521029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.521032Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:47.521035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.521038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:47.521049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710763 2025-09-25T16:19:47.521054Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:47.521057Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:47.521060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:19:47.521089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:47.521547Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:47.521559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:47.521898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.521908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:691:2641] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:46.893229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:46.893250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.893256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:46.893261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:46.893267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:46.893271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:46.893280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.893293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:46.893404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:46.893454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:46.910392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:46.910413Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:46.915105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:46.915189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:46.915225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:46.916723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:46.916780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:46.916893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.916952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:46.917369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.917412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:46.917674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.917685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.917698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:46.917703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:46.917707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:46.917731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.919016Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:46.937236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:46.937293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.937341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:46.937348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:46.937424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:46.937441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:46.938069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.938116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:46.938157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.938164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:46.938168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:46.938171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:46.938616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.938629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:46.938633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:46.939014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.939027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.939033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.939037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:46.939599Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:46.939944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:46.939970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:46.940123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.940144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:46.940150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.940212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:46.940218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.940236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:46.940245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:46.940595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.940601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
path id: 2 2025-09-25T16:19:47.808001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.808006Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.808020Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 133 2025-09-25T16:19:47.808175Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:47.808185Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:47.808189Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:47.808193Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:47.808197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:47.808208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:47.808758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.808766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-09-25T16:19:47.808770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.808844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:47.809106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:19:47.810299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:19:47.810308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:19:47.810319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:19:47.810321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:19:47.810377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:19:47.810381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:47.810385Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:19:47.810400Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:19:47.810408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.810412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:429:2398] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:47.831250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6700: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 5507 } } 2025-09-25T16:19:47.831281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:47.831302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 5507 } } 2025-09-25T16:19:47.831313Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 5507 } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:47.831439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.831444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 
2025-09-25T16:19:47.831457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.831471Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:47.831488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.831493Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.831499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:47.831505Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-09-25T16:19:47.831543Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.832069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.832131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.832150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.832155Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:47.832166Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:47.832169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.832173Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:47.832175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.832180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2025-09-25T16:19:47.832193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:47.832200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:47.832205Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:47.832209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:47.832237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:47.832547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.832559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:429:2398] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldHandleOverloadedShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:45.549501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:45.549530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.549536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:45.549542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:45.549549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:45.549554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:45.549563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:45.549579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:45.549718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:45.549782Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:45.566082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:45.566103Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:45.569222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:45.569283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:45.569321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:45.570982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:45.571045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:45.571149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.571198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:45.571653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.571699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:45.571942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.571953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:45.571974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:45.571982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:45.571989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:45.572018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.573366Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:45.594750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:45.594813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.594864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:45.594873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:45.594946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:45.594961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:45.595580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.595621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:45.595675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.595685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:45.595691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:45.595696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:45.596101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.596113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:45.596122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:45.596446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.596456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:45.596462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.596469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:45.597151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:45.601040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:45.601102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:45.601296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:45.601345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:45.601354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.601449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:45.601459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:45.601487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:45.601499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:45.601997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:45.602007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-09-25T16:19:47.558424Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-09-25T16:19:47.558435Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:103] Content size: processed-bytes# 14679, content-length# 14786, body-size# 32 2025-09-25T16:19:47.558456Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-09-25T16:19:47.559695Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14709 WrittenBytes: 11832 WrittenRows: 995 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.559709Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14709 WrittenBytes: 11832 WrittenRows: 995 ChecksumState: DownloadState: } 2025-09-25T16:19:47.559719Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14720-14751 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19260 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7D0AB41C-E22F-40D9-96A3-FAB4C75F735A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14720-14751 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-09-25T16:19:47.560353Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-09-25T16:19:47.560367Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:103] Content size: processed-bytes# 14709, content-length# 14786, body-size# 32 2025-09-25T16:19:47.560387Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-09-25T16:19:47.561688Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14739 WrittenBytes: 11856 WrittenRows: 997 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.561702Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14739 WrittenBytes: 11856 WrittenRows: 997 ChecksumState: DownloadState: } 2025-09-25T16:19:47.561711Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14752-14783 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19260 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1BB7B537-4BE3-4C4F-99F1-507A8C334AEB amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14752-14783 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic 
x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-09-25T16:19:47.562420Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-09-25T16:19:47.562431Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:103] Content size: processed-bytes# 14739, content-length# 14786, body-size# 32 2025-09-25T16:19:47.562451Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-09-25T16:19:47.563777Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14769 WrittenBytes: 11880 WrittenRows: 999 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.563791Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14769 WrittenBytes: 11880 WrittenRows: 999 ChecksumState: DownloadState: } 2025-09-25T16:19:47.563801Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14784-14785 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19260 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9593BE9B-60C2-4C41-B30C-8CDBF654AB66 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14784-14785 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-09-25T16:19:47.564503Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 2b } 2025-09-25T16:19:47.564514Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:103] Content size: processed-bytes# 14769, content-length# 14786, body-size# 2 2025-09-25T16:19:47.564533Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:103] Upload rows: count# 1, size# 39 2025-09-25T16:19:47.565966Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14786 WrittenBytes: 11893 WrittenRows: 1000 ChecksumState: DownloadState: } } 2025-09-25T16:19:47.565982Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14786 WrittenBytes: 11893 WrittenRows: 1000 ChecksumState: DownloadState: } 2025-09-25T16:19:47.565991Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:103] Finish: success# 1, error# , writtenBytes# 11893, writtenRows# 1000 2025-09-25T16:19:47.569260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 11893 
RowsProcessed: 1000 } 2025-09-25T16:19:47.569279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:47.569307Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 11893 RowsProcessed: 1000 } 2025-09-25T16:19:47.569325Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 103:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 11893 RowsProcessed: 1000 } 2025-09-25T16:19:47.569340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.569345Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.569351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:47.569359Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 129 -> 240 2025-09-25T16:19:47.569394Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 103:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.569802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.569837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.569845Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-09-25T16:19:47.569860Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:47.569868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:47.569874Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:19:47.569878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:47.569883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-09-25T16:19:47.569910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 103 2025-09-25T16:19:47.569918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:19:47.569925Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-09-25T16:19:47.569930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 103:0 2025-09-25T16:19:47.569968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:47.570363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.570374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::CancelUponTransferringShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:47.202008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:47.202035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.202041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:47.202047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:47.202055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:47.202059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:47.202069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.202085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:47.202216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-09-25T16:19:47.202279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:47.217520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:47.217543Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:47.221996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:47.222076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:47.222107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:47.224344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:47.224415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:47.224542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.224593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:47.225107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.225156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:47.225421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.225432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.225449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:47.225457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.225464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:47.225491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.226659Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:47.246948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:47.246997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.247040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:47.247047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:47.247127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.247145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:47.247722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.247753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:47.247800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.247809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:47.247813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.247818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:47.248271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.248285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.248291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:47.248648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.248658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.248664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.248669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:47.249331Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:47.249743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:47.249786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:47.249974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.250000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.250007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.250111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:47.250120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.250144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:47.250156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:47.250578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.250586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Transferring SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' } 2025-09-25T16:19:47.907894Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:47.907919Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 101, at schemeshard: 72057594046678944 2025-09-25T16:19:47.907926Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:47.907931Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1171: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 101 2025-09-25T16:19:47.907943Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:531: TImport::TTxProgress: Restore propose: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Transferring SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' }, txId# 281474976710758 2025-09-25T16:19:47.908040Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete TestWaitNotification wait txId: 101 2025-09-25T16:19:47.919427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:47.919444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:47.920686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRestore Internal: true Restore { TableName: "Table" TableDescription { Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" } NumberOfRetries: 0 S3Settings { Endpoint: "localhost:15105" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" UseVirtualAddressing: true } } } TxId: 281474976710758 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-09-25T16:19:47.920744Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TRestore Propose, path: /MyRoot/Table, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.920773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:19:47.920784Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710758:0 type: TxRestore target path: [OwnerId: 72057594046678944, LocalPathId: 2] source path: 2025-09-25T16:19:47.920888Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.920901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976710758:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:563) 2025-09-25T16:19:47.921146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:47.921156Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:47.921552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.921599Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710758, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE TABLE, path: /MyRoot/Table 2025-09-25T16:19:47.921646Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2025-09-25T16:19:47.921652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 2025-09-25T16:19:47.921661Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:47.921666Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1267: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-09-25T16:19:47.921670Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:1268: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 2025-09-25T16:19:47.921695Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:553: TImport::TTxProgress: cancel restore's tx: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Cancellation Issue: 'Cancelled manually' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Transferring SubState: Proposed WaitTxId: 281474976710758 SrcPath: SrcPrefix: Issue: '' } 2025-09-25T16:19:47.921735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.921743Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxRestore, at tablet# 72057594046678944 2025-09-25T16:19:47.921748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710758:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.921751Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710758:0 2 -> 3 2025-09-25T16:19:47.922208Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:47.922259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710758 TxId: 101 2025-09-25T16:19:47.922265Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 101:0, target opId# 281474976710758:0 2025-09-25T16:19:47.922353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TRestore TConfigurePart ProgressState, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore.cpp:38: Propose restore, datashard: 72075186233409546, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TRestore TConfigurePart ProgressState, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore.cpp:38: Propose restore, datashard: 72075186233409546, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922828Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7213: Handle: TEvCancelTxResult: Cookie: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:47.922841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7215: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710758 TxId: 101 2025-09-25T16:19:47.923101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-09-25T16:19:47.923123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 2025-09-25T16:19:47.923467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-09-25T16:19:47.923500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:47.923505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:408:2371] TestWaitNotification: OK eventTxId 101 AUDIT LOG buffer(5): 2025-09-25T16:19:47.821105Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:19:47.828349Z: component=schemeshard, id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=IMPORT START, status=SUCCESS, detailed_status=SUCCESS, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none} 
2025-09-25T16:19:47.856580Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE WITH INDEXES, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:19:47.921593Z: component=schemeshard, tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=RESTORE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:19:47.922872Z: component=schemeshard, id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=IMPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none}, start_time=1970-01-01T00:00:00.035000Z, end_time=1970-01-01T00:00:00.129500Z AUDIT LOG checked line: 2025-09-25T16:19:47.922872Z: component=schemeshard, id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=IMPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none}, start_time=1970-01-01T00:00:00.035000Z, end_time=1970-01-01T00:00:00.129500Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-09-25T16:19:34.592242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062245695484939:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:34.592331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:34.596529Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:34.597222Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062242128048483:2156];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:34.597394Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/001147/r3tmp/tmpwpQ19Z/pdisk_1.dat 2025-09-25T16:19:34.599363Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:34.635730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:34.639461Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:34.654528Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14350, node 1 2025-09-25T16:19:34.664883Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/endf/001147/r3tmp/yandexJ8OgTp.tmp 2025-09-25T16:19:34.664897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/endf/001147/r3tmp/yandexJ8OgTp.tmp 2025-09-25T16:19:34.668545Z INFO: TTestServer started on Port 17147 GrpcPort 14350 2025-09-25T16:19:34.679054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/endf/001147/r3tmp/yandexJ8OgTp.tmp 2025-09-25T16:19:34.679171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17147 PQClient connected to localhost:14350 === TenantModeEnabled() = 0 === Init PQ - start server on port 14350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:19:34.693301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:34.693337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:34.694932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:34.701296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:34.701328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:34.702902Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:19:34.703204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:34.712440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-09-25T16:19:34.712528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:34.712596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-09-25T16:19:34.712608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-09-25T16:19:34.712679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:19:34.712703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.713563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:34.713629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-09-25T16:19:34.713745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:34.713770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046644480 2025-09-25T16:19:34.713774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.713784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-09-25T16:19:34.714205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-09-25T16:19:34.714221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 3 -> 128 2025-09-25T16:19:34.714756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:34.714771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:34.714792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-09-25T16:19:34.715678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.716142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-09-25T16:19:34.716183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-09-25T16:19:34.716792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817174765, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:34.716845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 
1758817174765 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-09-25T16:19:34.716856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:34.716922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 128 -> 240 2025-09-25T16:19:34.716932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:34.716971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] wa ... -topic1" Generation: 1 Step: 1 Session: "shared/cli_3_1_4239307694517561338_v1" ClientId: "cli" PipeClient { RawX1: 7554062299873965823 RawX2: 4503612512274956 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-09-25T16:19:47.449743Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-09-25T16:19:47.449756Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1143: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-09-25T16:19:47.449791Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 got read request: guid# 30ef50f7-5922b142-e07bbcc3-f624b8ae 2025-09-25T16:19:47.449980Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:983: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [3:7554062299873965826:2575] 2025-09-25T16:19:47.450057Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/cli_3_1_4239307694517561338_v1:1 with generation 1 2025-09-25T16:19:47.451574Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 280 WriteTimestampEstimateMS: 1758817187447 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-09-25T16:19:47.451593Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:694: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-09-25T16:19:47.451621Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1434: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 sending to client partition status 2025-09-25T16:19:47.451924Z :INFO: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-09-25T16:19:47.452077Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-09-25T16:19:47.452122Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:539: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-09-25T16:19:47.452136Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1023: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-09-25T16:19:47.452143Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:969: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-09-25T16:19:47.452164Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2332: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 280 2025-09-25T16:19:47.452173Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1TEvPartitionReady. Aval parts: 1 2025-09-25T16:19:47.452185Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2266: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 performing read request: guid# e0101212-6515e39d-ebc428c5-54290c92, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2025-09-25T16:19:47.452210Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1395: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid e0101212-6515e39d-ebc428c5-54290c92 2025-09-25T16:19:47.452735Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1758817187347 CreateTimestampMS: 1758817187346 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1758817187348 CreateTimestampMS: 1758817187347 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1758817187348 CreateTimestampMS: 1758817187347 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551530 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-09-25T16:19:47.452768Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 3 2025-09-25T16:19:47.452780Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:901: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid e0101212-6515e39d-ebc428c5-54290c92 has messages 1 2025-09-25T16:19:47.452814Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1940: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 read done: guid# e0101212-6515e39d-ebc428c5-54290c92, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2025-09-25T16:19:47.452841Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2102: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 response to read: guid# e0101212-6515e39d-ebc428c5-54290c92 2025-09-25T16:19:47.452926Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2145: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 Process answer. Aval parts: 0 2025-09-25T16:19:47.452997Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-09-25T16:19:47.453026Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-09-25T16:19:47.453127Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-09-25T16:19:47.453145Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] Returning serverBytesSize = 371 to budget 2025-09-25T16:19:47.453153Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2025-09-25T16:19:47.453278Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-09-25T16:19:47.453333Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-09-25T16:19:47.453345Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-09-25T16:19:47.453349Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-09-25T16:19:47.453357Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-09-25T16:19:47.453367Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] Returning serverBytesSize = 0 to budget 2025-09-25T16:19:47.453399Z :INFO: [] [] [69e8c007-82a20869-f0d48c09-463dd900] Closing read session. 
Close timeout: 0.000000s 2025-09-25T16:19:47.453406Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-09-25T16:19:47.453412Z :INFO: [] [] [69e8c007-82a20869-f0d48c09-463dd900] Counters: { Errors: 0 CurrentSessionLifetimeMs: 5 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:19:47.453425Z :NOTICE: [] [] [69e8c007-82a20869-f0d48c09-463dd900] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-09-25T16:19:47.453428Z :DEBUG: [] [] [69e8c007-82a20869-f0d48c09-463dd900] [] Abort session to cluster 2025-09-25T16:19:47.453424Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2025-09-25T16:19:47.453487Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 got read request: guid# a8bb99dc-18c27756-30036469-25b8c75 2025-09-25T16:19:47.453535Z :NOTICE: [] [] [69e8c007-82a20869-f0d48c09-463dd900] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-09-25T16:19:47.453681Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc read done: success# 0, data# { } 2025-09-25T16:19:47.453691Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc read failed 2025-09-25T16:19:47.453696Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 grpc closed 2025-09-25T16:19:47.453709Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/cli session shared/cli_3_1_4239307694517561338_v1 is DEAD 2025-09-25T16:19:47.453869Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/cli_3_1_4239307694517561338_v1 2025-09-25T16:19:47.453959Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1665: [72075186224037898][rt3.dc1--topic1] pipe [3:7554062299873965823:2572] disconnected; active server actors: 1 2025-09-25T16:19:47.453967Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1674: [72075186224037898][rt3.dc1--topic1] pipe [3:7554062299873965823:2572] client cli disconnected session shared/cli_3_1_4239307694517561338_v1 >> TImportTests::ShouldRestoreTtlSettingsInValueSinceUnixEpochMode [GOOD] >> TImportTests::ShouldRestoreStorageSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreAnyAzReadReplicas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:47.013832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:47.013854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.013860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:47.013865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:47.013871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:47.013875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:47.013885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.013899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:47.014013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:47.014070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:47.030055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:47.030076Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:47.034059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:47.034144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:47.034178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:47.035663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:47.035725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:47.035825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.035872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:47.036300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.036344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:47.036582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.036592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.036612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:47.036619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.036626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:47.036653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.037922Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:47.059440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:47.059503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.059554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:47.059562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:47.059633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.059648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:47.060315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.060358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:47.060418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.060429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:47.060436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.060442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:47.060989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.061004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.061012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:47.061476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.061490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.061497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 
1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.061504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:47.062249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:47.062698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:47.062734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:47.062933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.062960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.062967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.063052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:47.063060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.063088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:47.063096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:47.063630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.063642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:48.192094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:48.192179Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:48.192188Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:48.192203Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:48.192207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.192212Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:48.192216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.192220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:48.192231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:365:2342] message: TxId: 281474976720758 2025-09-25T16:19:48.192238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.192244Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:48.192248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:48.192272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-09-25T16:19:48.192649Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:48.192662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:48.192672Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:48.192677Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:48.193111Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:48.193130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:48.193136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [3:505:2455] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:48.193725Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:48.193775Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 59us result status StatusSuccess 2025-09-25T16:19:48.193885Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:48.193967Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:48.194018Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 52us result status StatusSuccess 2025-09-25T16:19:48.194235Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } FollowerGroups { FollowerCount: 1 RequireAllDataCenters: false } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TTopicReaderTests::TestRun_ReadMessages_With_Offset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset >> TImportTests::ShouldRestoreStorageSettings [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldFailOnInvalidPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:47.706721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:47.706739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.706743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:47.706748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:47.706753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:47.706755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:47.706762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.706772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:47.706859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:47.706902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:47.718751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:47.718768Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:47.721862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:47.721932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:47.721958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:47.723165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:47.723212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:47.723301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.723340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:47.723747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.723791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:47.724038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.724050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.724072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:47.724081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.724087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:47.724120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.725304Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:47.740004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:47.740058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.740098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:47.740104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:47.740160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.740171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:47.740728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.740762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:47.740802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.740809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:47.740813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.740817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:47.741194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.741204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.741210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:47.741503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.741515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.741522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.741528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:47.742061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:47.742422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:47.742447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:47.742592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.742610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.742615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.742686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:47.742693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.742721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:47.742730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:47.743096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.743105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
b/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:48.270912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:48.270945Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:48.270975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.270985Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:48.270990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:48.270995Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:48.271395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.271410Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:48.271416Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:48.271739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.271748Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.271754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:48.271760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:48.271791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:48.272069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:48.272098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:48.272271Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:48.272295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:48.272301Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:48.272355Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:48.272361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:48.272387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:48.272395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:48.272779Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:48.272788Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:48.272844Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:48.272851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:48.272916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.272922Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:48.272932Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:48.272936Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:48.272939Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:48.272942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:48.272945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:48.272949Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:48.272952Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:48.272955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:48.272964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:48.272968Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:48.272972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:48.273030Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:48.273042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:48.273047Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:48.273053Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:48.273058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:48.273068Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:48.273640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:48.273709Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:48.281936Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:273:2263] Bootstrap 2025-09-25T16:19:48.282173Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:273:2263] Become StateWork (SchemeCache [3:278:2268]) 2025-09-25T16:19:48.282247Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-09-25T16:19:48.282280Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 101 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:32157" scheme: HTTP items { source_prefix: "a" destination_path: "/InvalidRoot/Table" } } } 2025-09-25T16:19:48.282328Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# BAD_REQUEST, error# Check failed: path: '/InvalidRoot/Table', error: root not found, source_location: ydb/core/tx/schemeshard/schemeshard_import__create.cpp:75 2025-09-25T16:19:48.282335Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: 
TxId: 101 2025-09-25T16:19:48.282384Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-09-25T16:19:48.282456Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:273:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:48.283046Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-09-25T16:19:48.283098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:48.283106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:48.283170Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:48.283187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:48.283193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:285:2275] TestWaitNotification: OK eventTxId 101 >> TRestoreTests::ExportImportWithSchemeCorruption >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> TImportTests::UserSID >> TImportTests::UidAsIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreStorageSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:47.730348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:47.730372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.730378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:47.730383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:47.730390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:47.730393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:47.730399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.730409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:47.730502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:47.730550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:47.742074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:47.742092Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:47.745960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:47.746049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:47.746092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:47.748230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:47.748297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:47.748408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.748462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:47.748956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.749000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:47.749202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.749209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.749223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:47.749228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.749232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:47.749254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.750330Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:47.766607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:47.766667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.766705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:47.766711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:47.766777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.766812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:47.767340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.767376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:47.767414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.767422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:47.767426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.767429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:47.767813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.767824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.767832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:47.768154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.768168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.768172Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.768177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:47.768668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:47.769034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:47.769070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:47.769201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.769218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.769224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.769284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:47.769289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.769307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:47.769315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:47.769650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.769656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
2075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:48.901263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:48.901339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:48.901352Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:48.901368Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:48.901373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.901380Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:48.901384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.901390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:48.901403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:365:2342] message: TxId: 281474976720758 2025-09-25T16:19:48.901410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:48.901417Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:48.901423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:48.901446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-09-25T16:19:48.901827Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:48.901840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:48.901849Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:48.901856Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:48.902227Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:48.902245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:48.902253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:505:2455] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:48.902809Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:48.902852Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 52us result status StatusSuccess 2025-09-25T16:19:48.902958Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:48.903030Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:48.903075Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 46us result status StatusSuccess 2025-09-25T16:19:48.903306Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "common" AllowOtherKinds: false } Log { PreferredPoolKind: "common" AllowOtherKinds: false } External { PreferredPoolKind: "external" AllowOtherKinds: false } ExternalThreshold: 524288 } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:47.073539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:47.073562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.073567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:47.073571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:47.073582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:47.073587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:47.073597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:47.073611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:47.073736Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:47.073804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:47.090784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:47.090804Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:47.094536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:47.094628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:47.094660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:47.096144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:47.096201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:47.096283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.096330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:47.096762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.096810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:47.097046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.097054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:47.097067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:47.097073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:47.097077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:47.097100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.098217Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:47.114121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:19:47.114172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.114212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:47.114218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:47.114278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:47.114290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:47.114794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.114826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:47.114870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.114876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:47.114880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:47.114883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:47.115282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.115291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:47.115295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:47.115571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.115578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:47.115581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.115586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:47.116064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:47.116409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:47.116449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:47.116602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:47.116621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:47.116626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.116686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:47.116691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:47.116720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:47.116731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:47.117102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:47.117109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
chemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-09-25T16:19:48.930514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-09-25T16:19:48.930523Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-09-25T16:19:48.930528Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-09-25T16:19:48.930535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-09-25T16:19:48.930551Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:24574 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1370257F-ECEB-4AF8-A8AC-128890E8DCEC amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:19:48.930990Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-09-25T16:19:48.931004Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:281474976710765] HeadObject: key# /data_00.csv.zst 2025-09-25T16:19:48.931492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:24574 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D3F93C2B-2831-4C27-9BB4-8A5CC05EFC76 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-09-25T16:19:48.931874Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f ContentLength: 20 } } 2025-09-25T16:19:48.932345Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710765 2025-09-25T16:19:48.943151Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:48.943173Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710765] Process download info at 'DownloadInfo': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:48.943190Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710765] GetObject: key# /data_00.csv.zst, range# 0-19 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:24574 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6EAF3A3C-AC99-4672-84BF-5EEAC5B41D6B amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-19 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-09-25T16:19:48.944137Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: daa167f46d135bcc1fbc9f42f80e496f Body: 20b } 2025-09-25T16:19:48.944149Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710765] Content size: processed-bytes# 0, content-length# 20, body-size# 20 2025-09-25T16:19:48.944227Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710765] Upload rows: count# 1, size# 36 2025-09-25T16:19:48.944770Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409549 Status: 0 Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:48.944783Z node 3 :DATASHARD_RESTORE 
NOTICE: import_s3.cpp:621: [Import] [s3:281474976710765] Process download info at 'UploadResponse': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:48.944790Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710765] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-09-25T16:19:48.957575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 909 RawX2: 12884904723 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:48.957602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-09-25T16:19:48.957632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 909 RawX2: 12884904723 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:48.957648Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 909 RawX2: 12884904723 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:48.957665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:48.957670Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.957687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-09-25T16:19:48.957695Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710765:0 129 -> 240 2025-09-25T16:19:48.957744Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:48.958262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:19:48.958344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710765:0, at 
schemeshard: 72057594046678944 2025-09-25T16:19:48.958353Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-09-25T16:19:48.958367Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-09-25T16:19:48.958372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:19:48.958378Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-09-25T16:19:48.958381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:19:48.958386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-09-25T16:19:48.958399Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710765 2025-09-25T16:19:48.958406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:19:48.958411Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710765:0 2025-09-25T16:19:48.958416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710765:0 2025-09-25T16:19:48.958438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:48.958840Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-09-25T16:19:48.958854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710765 2025-09-25T16:19:48.958865Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:48.958871Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710765 2025-09-25T16:19:48.959313Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:48.959335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:48.959342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:874:2805] TestWaitNotification: OK eventTxId 104 >> TImportTests::ChangefeedsWithPartitioning >> TImportTests::UserSID [GOOD] >> TImportTests::ViewCreationRetry >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Raw] >> TImportTests::ShouldRestoreSplitPoints >> TRestoreTests::ShouldFailOnOutboundKey[Raw] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Raw] >> TImportTests::UidAsIdempotencyKey [GOOD] >> TImportTests::UnexpectedPermission >> TRestoreTests::ExportImportWithSchemeCorruption [GOOD] >> TRestoreTests::ExportImportWithSchemeChecksumCorruption >> TRestoreTests::ShouldSucceedOnSingleShardTable[Raw] >> TImportTests::ShouldRestoreSplitPoints [GOOD] >> 
TImportTests::ShouldRestorePartitioningBySize >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnSmallBuffer >> TRestoreTests::ShouldFailOnOutboundKey[Raw] [GOOD] >> TRestoreTests::ShouldFailOnOutboundKey[Zstd] >> TImportTests::UnexpectedPermission [GOOD] >> TImportTests::TopicImport >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Raw] [GOOD] >> TImportTests::ChangefeedsWithPartitioning [GOOD] >> TImportTests::ChangefeedsWithTablePermissions >> TRestoreTests::ShouldFailOnEmptyToken[Raw] >> TImportTests::ShouldSucceedOnIndexedTable2 >> TImportTests::ViewCreationRetry [GOOD] >> TImportTests::UnknownSchemeObjectImport >> TImportTests::TopicImport [GOOD] >> TRestoreTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnSingleShardTable[Zstd] >> TImportTests::ShouldRestorePartitioningBySize [GOOD] >> TRestoreTests::ShouldFailOnOutboundKey[Zstd] [GOOD] >> TImportTests::ShouldRestorePerAzReadReplicas >> TRestoreTests::ShouldFailOnVariousErrors >> TRestoreTests::ShouldSucceedOnSmallBuffer [GOOD] >> TRestoreTests::ShouldSucceedOnSupportedDatatypes >> TRestoreTests::ExportImportWithSchemeChecksumCorruption [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Raw] |82.3%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TRestoreTests::ShouldFailOnEmptyToken[Raw] [GOOD] >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] >> TImportTests::UnknownSchemeObjectImport [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::TopicImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.810250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.810274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.810280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.810285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.810291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.810296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.810305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.810320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.810421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:49.810473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:49.823239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:49.823257Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:49.827089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:49.827158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:49.827185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:49.828676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:49.828744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:49.828875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.828931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:49.829397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.829438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:49.829685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.829692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.829706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:49.829711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:49.829716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:49.829739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.830990Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:49.847552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:49.847615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.847672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:49.847681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:49.847761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:49.847781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:49.848545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.848600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:49.848665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.848678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:49.848684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:49.848690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:49.849242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.849259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:49.849267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:49.849852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.849866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.849873Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.849880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:49.850652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:49.851075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:49.851133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:49.851377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.851411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:49.851419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.851506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:49.851515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.851546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:49.851559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:49.852089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.852099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
] was 4 2025-09-25T16:19:50.653498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.653605Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.653614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710757, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:50.653649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710757, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:19:50.653690Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.653696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710757, path id: 2 2025-09-25T16:19:50.653702Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710757, path id: 3 2025-09-25T16:19:50.653770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.653783Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:1 ProgressState 2025-09-25T16:19:50.653815Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710757:1 progress is 2/2 2025-09-25T16:19:50.653821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 2/2 2025-09-25T16:19:50.653826Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710757:1 progress is 2/2 2025-09-25T16:19:50.653830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 2/2 2025-09-25T16:19:50.653834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 2/2, is published: false 2025-09-25T16:19:50.653839Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 2/2 2025-09-25T16:19:50.653845Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710757:0 2025-09-25T16:19:50.653851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710757:0 2025-09-25T16:19:50.653863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:50.653868Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, 
operation id: 281474976710757:1 2025-09-25T16:19:50.653872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710757:1 2025-09-25T16:19:50.653888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-09-25T16:19:50.653894Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710757, publications: 2, subscribers: 1 2025-09-25T16:19:50.653899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710757, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-09-25T16:19:50.653903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710757, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:19:50.654108Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.654125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.654130Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710757 2025-09-25T16:19:50.654135Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710757, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-09-25T16:19:50.654140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:19:50.654435Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.654453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.654458Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710757 2025-09-25T16:19:50.654462Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710757, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:19:50.654468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:19:50.654480Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 281474976710757, subscribers: 1 2025-09-25T16:19:50.654485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:127:2152] 2025-09-25T16:19:50.655187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.656677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-09-25T16:19:50.656732Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710757 2025-09-25T16:19:50.656746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710757 2025-09-25T16:19:50.656758Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:50.656765Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710757 2025-09-25T16:19:50.657687Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:50.657721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:50.657728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:290:2278] TestWaitNotification: OK eventTxId 101 2025-09-25T16:19:50.661937Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored/Topic_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:50.662005Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored/Topic_0" took 87us result status StatusSuccess 2025-09-25T16:19:50.662171Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored/Topic_0" PathDescription { Self { Name: "Topic_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic_0" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 10 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 50000000 BurstSize: 50000000 SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } YdbDatabasePath: "/MyRoot" Consumers { Name: "Consumer_0" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } Consumers { Name: "Consumer_1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: 
"data-streams" Version: 0 Important: true } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TImportTests::ChangefeedsWithTablePermissions [GOOD] >> TImportTests::ChangefeedsExportRestoreUnhappyPropose >> TImportWithRebootsTests::ShouldSucceedOnTableWithChecksum >> TRestoreTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch >> TImportTests::ShouldRestorePerAzReadReplicas [GOOD] >> TRestoreTests::ShouldFailOnVariousErrors [GOOD] >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] [GOOD] >> TRestoreTests::ShouldSucceedOnSupportedDatatypes [GOOD] >> TImportTests::ShouldSucceedOnIndexedTable2 [GOOD] >> TImportTests::ShouldSucceedOnIndexedTable3 >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::UnknownSchemeObjectImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.646962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.646990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.646996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.647002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.647009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.647013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.647022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.647036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.647160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:49.647226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:49.664719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:49.664741Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:49.669336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:49.669438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:49.669476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:49.671227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:49.671309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:49.671420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.671474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:49.672001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.672051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:49.672333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.672344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.672382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:49.672391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:49.672397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:49.672433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-09-25T16:19:49.673915Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:49.697708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:49.697773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.697824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:49.697833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:49.697917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:49.697933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:49.698606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.698648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:49.698694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.698703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:49.698708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:49.698712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:49.699128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.699137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:49.699146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:49.699463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.699473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.699479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.699485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:49.700206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:49.700599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:49.700639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:49.700810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.700851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:49.700859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.700936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:49.700943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.700967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:49.700978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:49.701408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.701418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
25-09-25T16:19:50.880324Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.880331Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.880706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.880718Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.880723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.880727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.880752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.881139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.881172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.881319Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.881336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.881342Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.881381Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.881386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.881409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.881419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.881780Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.881787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.881819Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.881825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:19:50.881893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.881898Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:19:50.881909Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:50.881912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.881915Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:19:50.881918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.881921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:19:50.881925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.881928Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:19:50.881931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:19:50.881940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:19:50.881943Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:19:50.881946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:19:50.881998Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:19:50.882007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
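The publication bookkeeping visible above (one publication registered for txId 1, "Publication in-flight, count: 1" on TEvUpdateAck, then "Publication complete, notify & remove" once the path is acknowledged at version 3) amounts to a small per-transaction counter of pending path publications. The sketch below is a hypothetical model of that bookkeeping, not the schemeshard implementation.

```python
# Hypothetical model of per-transaction scheme board publication tracking.
from collections import defaultdict

class PublicationTracker:
    def __init__(self):
        self.in_flight = defaultdict(set)         # txId -> {(path_id, version), ...}

    def publish(self, tx_id, path_id, version):
        self.in_flight[tx_id].add((path_id, version))

    def ack(self, tx_id, path_id, version):
        """Returns True when the last pending publication for tx_id is acked."""
        self.in_flight[tx_id].discard((path_id, version))
        if self.in_flight[tx_id]:
            return False                           # publication still in flight
        del self.in_flight[tx_id]                  # complete: notify & remove
        return True

tracker = PublicationTracker()
tracker.publish(tx_id=1, path_id=1, version=3)     # matches the log: 1 publication, 0 subscribers
assert tracker.ack(tx_id=1, path_id=1, version=3)
```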
2025-09-25T16:19:50.882010Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:19:50.882014Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:19:50.882017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.882024Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:19:50.882538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:19:50.882599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.883401Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:273:2263] Bootstrap 2025-09-25T16:19:50.883591Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:273:2263] Become StateWork (SchemeCache [3:278:2268]) 2025-09-25T16:19:50.883788Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:273:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:19:50.892345Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-09-25T16:19:50.892417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:19:50.892426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:19:50.892513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:19:50.892520Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 101, at schemeshard: 72057594046678944 REQUEST: HEAD /metadata.json HTTP/1.1 HEADERS: Host: localhost:17072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5687D041-6569-4631-876A-8B840B6B60C6 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /metadata.json / 14 REQUEST: GET /metadata.json HTTP/1.1 HEADERS: Host: localhost:17072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7B5BFEB4-790E-497E-9CF0-FACBB4285E4C amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /metadata.json / 14 REQUEST: HEAD /scheme.pb HTTP/1.1 HEADERS: Host: localhost:17072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
95AA81D2-FEE9-4981-9838-CBA06520E7A7 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 REQUEST: HEAD /create_view.sql HTTP/1.1 HEADERS: Host: localhost:17072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F5611B32-B764-4EB4-8003-91A5F5A2ECEF amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 REQUEST: HEAD /create_topic.pb HTTP/1.1 HEADERS: Host: localhost:17072 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B0A19EB4-6A26-4BF0-B9A9-3A7856BBABA2 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:19:50.901916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:50.901933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:290:2278] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldCountWrittenBytesAndRows[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.586180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.586198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.586202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.586206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.586214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.586217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.586224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.586233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.586324Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:49.586369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:49.598804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:49.598822Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:49.601937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:49.602017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:49.602044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:49.603507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:49.603561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:49.603642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.603680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:49.604119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.604157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:49.604371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.604378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.604393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:49.604399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:49.604403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:49.604425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.605519Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:49.621474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:19:49.621527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.621568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:49.621574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:49.621633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:49.621647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:49.622203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.622242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:49.622291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.622302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:49.622308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:49.622312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:49.622691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.622702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:49.622708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:49.623002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.623010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.623014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.623018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:49.623703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:49.624054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:49.624081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:49.624241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.624259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:49.624265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.624330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:49.624336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.624363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:49.624371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:49.624742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.624750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
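The S3 mock requests in this output (HEAD /metadata.json followed by a GET with "range: bytes=0-13" earlier, and the data_00.csv requests further down) follow one pattern: a HEAD to learn ContentLength and ETag, then a ranged GET for the whole object. The real code goes through aws-sdk-cpp; the sketch below only reproduces the same HTTP pattern with the Python standard library, reusing the endpoint and key shown in the log and assuming the server reports Content-Length on HEAD, as the mock does.

```python
# Sketch of the HEAD-then-ranged-GET access pattern seen against the S3 mock.
import urllib.request

def fetch_via_head_then_ranged_get(endpoint, key):
    head_req = urllib.request.Request(endpoint + key, method="HEAD")
    with urllib.request.urlopen(head_req) as head:
        length = int(head.headers["Content-Length"])   # e.g. 14 for /metadata.json
        etag = head.headers.get("ETag")
    # Mirrors "range: bytes=0-13" for the 14-byte /metadata.json in the log.
    get_req = urllib.request.Request(
        endpoint + key, headers={"Range": f"bytes=0-{length - 1}"}
    )
    with urllib.request.urlopen(get_req) as resp:
        body = resp.read()
    return etag, body

# Example (requires the mock from the test run to be listening):
# etag, body = fetch_via_head_then_ranged_get("http://localhost:17072", "/metadata.json")
```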
: attempt# 0 2025-09-25T16:19:51.122469Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:466: [Import] [s3:102] AllocateResource 2025-09-25T16:19:51.122510Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:483: [Import] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2025-09-25T16:19:51.122514Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:491: [Import] [s3:102] Restart: attempt# 0 2025-09-25T16:19:51.125299Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:19:51.125856Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:51.125867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:51.125918Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:51.125924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:19:51.126001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.126008Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:64076 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B812DFD5-34F0-47F8-81C8-3C2C14FF68C5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 28 2025-09-25T16:19:51.126110Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.126120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.126123Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:51.126128Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:51.126132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:51.126143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is 
published: true 2025-09-25T16:19:51.126255Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: b851191e5b982e03a34aa60df8f7d8ed ContentLength: 28 } } 2025-09-25T16:19:51.126727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:51.147517Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:51.168412Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: b851191e5b982e03a34aa60df8f7d8ed ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.168433Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: b851191e5b982e03a34aa60df8f7d8ed ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:51.168446Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv, range# 0-27 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:64076 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7DC65EBC-5E39-4620-8E98-CE4E0D1368F5 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-27 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 28 2025-09-25T16:19:51.169354Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: b851191e5b982e03a34aa60df8f7d8ed Body: 28b } 2025-09-25T16:19:51.169360Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 28, body-size# 28 2025-09-25T16:19:51.169385Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 2, size# 60 2025-09-25T16:19:51.169874Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: b851191e5b982e03a34aa60df8f7d8ed ProcessedBytes: 28 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.169890Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: b851191e5b982e03a34aa60df8f7d8ed ProcessedBytes: 28 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } 2025-09-25T16:19:51.169897Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 16, writtenRows# 2 2025-09-25T16:19:51.181825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:51.181844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:51.181862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:51.181873Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-09-25T16:19:51.181887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.181890Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.181894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:51.181899Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:51.181930Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.182383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.182428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.182438Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:51.182454Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.182458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.182464Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.182467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.182472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:51.182486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:51.182492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.182498Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:51.182503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:51.182532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:51.183042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.183054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnSupportedDatatypes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.996297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.996319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.996323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.996327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.996331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.996335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.996341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.996354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.996460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-09-25T16:19:49.996505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:50.008481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:50.008509Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:50.012482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:50.012559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:50.012592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.013989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.014040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.014116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.014153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.014544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.014579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.014771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.014777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.014791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.014796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.014800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.014826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.016013Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.038554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:50.038611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.038661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.038669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.038742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.038759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.039466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.039503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.039552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.039560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.039565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.039569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.040065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.040522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.040545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.041090Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.041544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.041585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.041725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.041747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.041752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.041818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.041823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.041842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.041851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.042260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.042269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
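The restore figures reported in these tests (a 28-byte data_00.csv yielding writtenBytes 16 and writtenRows 2 earlier, and a 375-byte file yielding writtenBytes 239 and writtenRows 1 just below) surface in TEvS3UploadRowsResponse and are echoed as BytesProcessed/RowsProcessed in the final OpResult. The sketch below is only a hypothetical model of that accounting; the numbers come from the log, the structure does not.

```python
# Hypothetical model of the download-info accounting visible in the log.
from dataclasses import dataclass

@dataclass
class DownloadInfo:
    processed_bytes: int = 0
    written_bytes: int = 0
    written_rows: int = 0

def apply_upload_response(info, processed, written_bytes, written_rows):
    # The shard advances its checkpoint after each successful upload batch.
    info.processed_bytes = processed
    info.written_bytes += written_bytes
    info.written_rows += written_rows
    return info

info = apply_upload_response(DownloadInfo(), processed=28, written_bytes=16, written_rows=2)
assert (info.written_bytes, info.written_rows) == (16, 2)   # matches OpResult above
```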
-09-25T16:19:51.124964Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:466: [Import] [s3:102] AllocateResource 2025-09-25T16:19:51.125015Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:483: [Import] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2025-09-25T16:19:51.125021Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:491: [Import] [s3:102] Restart: attempt# 0 2025-09-25T16:19:51.128638Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:17825 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 707E05CE-8BA4-42EB-A3A9-E2083F37661E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 375 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:19:51.129635Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:51.129649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:51.129754Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:51.129763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:19:51.129872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.129884Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:51.129973Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 35d3d7899d70c742e71e02584eabd1de ContentLength: 375 } } 2025-09-25T16:19:51.130050Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.130063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.130069Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:51.130078Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:51.130085Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:51.130101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:19:51.130950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:51.151803Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:51.172776Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.172797Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:51.172813Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv, range# 0-374 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:17825 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F8795D7D-DD6E-40C5-A8B9-7C1EC377D16C amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-374 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 375 2025-09-25T16:19:51.173727Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 35d3d7899d70c742e71e02584eabd1de Body: 375b } 2025-09-25T16:19:51.173738Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 375, body-size# 375 2025-09-25T16:19:51.173903Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 405 2025-09-25T16:19:51.174505Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 375 WrittenBytes: 239 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.174519Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 375 WrittenBytes: 239 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:51.174527Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 239, writtenRows# 1 2025-09-25T16:19:51.186986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true 
Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-09-25T16:19:51.187010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:51.187035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-09-25T16:19:51.187049Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-09-25T16:19:51.187067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.187072Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.187077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:51.187084Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:51.187119Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.187667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.187705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.187712Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:51.187725Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.187730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.187735Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.187739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.187743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:51.187756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:51.187762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.187767Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:51.187771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:51.187794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:51.188272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.188284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestorePerAzReadReplicas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.977992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.978018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.978024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.978030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.978037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.978041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.978050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.978067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.978185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-09-25T16:19:49.978245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:49.994147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:49.994170Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:49.998199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:49.998291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:49.998330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.000837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.000917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.001055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.001121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.001633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.001688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.001989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.002000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.002022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.002030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.002037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.002073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.003543Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.026269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:50.026341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.026406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.026416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.026492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.026529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.027273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.027334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.027398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.027409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.027415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.027420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.027839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.027851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.027857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.028227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.028238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.028244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.028250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.028980Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.029397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.029454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.029679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.029706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.029714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.029805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.029813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.029846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.029858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.030333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.030344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
n.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:51.156169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:51.156241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:51.156247Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:51.156262Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:51.156265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:51.156269Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:51.156271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:51.156274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:51.156284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:365:2342] message: TxId: 281474976720758 2025-09-25T16:19:51.156289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:51.156293Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:51.156296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:51.156314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-09-25T16:19:51.156759Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:51.156777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:51.156789Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:51.156795Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:51.157223Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:51.157245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.157252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: 
satisfy waiter [3:505:2455] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:51.157819Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:51.157873Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 61us result status StatusSuccess 2025-09-25T16:19:51.157989Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:51.158067Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:51.158121Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 55us result status StatusSuccess 2025-09-25T16:19:51.158325Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } FollowerGroups { FollowerCount: 1 RequireAllDataCenters: true FollowerCountPerDataCenter: true } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:50.102808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:50.102832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:50.102838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:50.102843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:50.102849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:50.102853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:50.102863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:50.102877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:50.103004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:50.103066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:50.119477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:50.119500Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:50.123526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:50.123615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:50.123653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.125359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.125424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.125534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.125589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.126077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.126122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.126376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.126389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.126408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.126416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.126422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.126453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.127840Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.151671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:50.151729Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.151773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.151781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.151855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.151872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.152476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.152514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.152560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.152569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.152575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.152580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.153056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.153070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.153076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.153462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.153474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.153480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.153487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-09-25T16:19:50.154197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.154605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.154645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.154834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.154864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.154871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.154951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.154958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.154984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.154994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.155564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.155572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
rceBroker::TEvResourceAllocated { TaskId: 1 } 2025-09-25T16:19:51.203057Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:491: [Import] [s3:102] Restart: attempt# 0 2025-09-25T16:19:51.205568Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:19:51.206070Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:51.206080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:51.206148Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:51.206152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:19:51.206220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.206227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:51.206319Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.206328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:51.206331Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:51.206335Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:51.206340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:51.206352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7861E911-7B1B-47B2-8B14-97096E85AAC1 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:19:51.206712Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-09-25T16:19:51.206728Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv.zst 2025-09-25T16:19:51.207067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:2382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A3256004-603F-4118-B3D6-46F073B8C490 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 15 2025-09-25T16:19:51.207713Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ContentLength: 15 } } 2025-09-25T16:19:51.218460Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:51.239470Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.239490Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:51.239504Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-14 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:2382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CEF22E49-118C-4D20-A3BF-093EC7380855 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-14 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 15 2025-09-25T16:19:51.240312Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9e2cb8a1ca146d055332641ef8e7b2a6 Body: 15b } 2025-09-25T16:19:51.240321Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 15, body-size# 15 2025-09-25T16:19:51.240346Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 0, error# Empty token on line: "a1",, writtenBytes# 0, writtenRows# 0 2025-09-25T16:19:51.240355Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 0, size# 8 2025-09-25T16:19:51.242168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: 
\"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.242186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:51.242204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: \"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.242216Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: \"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.242229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.242233Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.242237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:51.242242Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:51.242277Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.242736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.242774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.242780Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:51.242792Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.242795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.242798Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.242800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.242804Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:51.242815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:51.242819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.242823Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:51.242826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:51.242847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:51.243238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.243248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnVariousErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.996713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.996738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.996744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.996749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.996755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.996759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.996768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.996782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.996923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:49.996980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:50.013721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:50.013745Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:50.017649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:50.017738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:50.017763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.019099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.019144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.019224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.019261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.019681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.019716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.019908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.019914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.019930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.019935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.019940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.019965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.021007Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.039065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:19:50.039118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.039164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.039170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.039230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.039240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.039869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.039913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.039960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.039967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.039971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.039975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.040295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.040535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.040544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.040549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.041049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.041342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.041378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.041535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.041551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.041556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.041615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.041620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.041641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.041649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.041981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.041987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
628: TTxOperationReply execute, operationId: 111:2, at schemeshard: 72057594046678944, message: Source { RawX1: 584 RawX2: 12884904435 } Origin: 72075186233409547 State: 2 TxId: 111 Step: 0 Generation: 2 2025-09-25T16:19:51.230321Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 111:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:19:51.230326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 111:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 584 RawX2: 12884904435 } Origin: 72075186233409547 State: 2 TxId: 111 Step: 0 Generation: 2 2025-09-25T16:19:51.230337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 111:2, shardIdx: 72057594046678944:3, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.230342Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 111:2, at schemeshard: 72057594046678944 2025-09-25T16:19:51.230347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 111:2, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:51.230352Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 111:2 129 -> 240 2025-09-25T16:19:51.230413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 587 RawX2: 12884904437 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-09-25T16:19:51.230417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 111, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:51.230424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 111:0, at schemeshard: 72057594046678944, message: Source { RawX1: 587 RawX2: 12884904437 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-09-25T16:19:51.230428Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 111:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:19:51.230433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 111:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 587 RawX2: 12884904437 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-09-25T16:19:51.230437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 111:0, shardIdx: 72057594046678944:2, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.230440Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 111:0, at schemeshard: 
72057594046678944 2025-09-25T16:19:51.230442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 111:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:51.230446Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 111:0 129 -> 240 2025-09-25T16:19:51.230912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-09-25T16:19:51.231362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-09-25T16:19:51.231387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:2, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-09-25T16:19:51.231423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-09-25T16:19:51.231432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:2, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 111:2, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231481Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 111:2 ProgressState 2025-09-25T16:19:51.231489Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:2 progress is 2/3 2025-09-25T16:19:51.231491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 2/3 2025-09-25T16:19:51.231495Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:2 progress is 2/3 2025-09-25T16:19:51.231497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 2/3 2025-09-25T16:19:51.231500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 111, ready parts: 2/3, is published: true 2025-09-25T16:19:51.231524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 111:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.231554Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 111:0 ProgressState 2025-09-25T16:19:51.231561Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:0 progress is 3/3 2025-09-25T16:19:51.231565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 111 ready parts: 3/3 2025-09-25T16:19:51.231570Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:0 progress is 3/3 2025-09-25T16:19:51.231573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-09-25T16:19:51.231577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2025-09-25T16:19:51.231593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:343:2321] message: TxId: 111 2025-09-25T16:19:51.231599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-09-25T16:19:51.231605Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:0 2025-09-25T16:19:51.231610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 111:0 2025-09-25T16:19:51.231630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:19:51.231636Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:1 2025-09-25T16:19:51.231640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 111:1 2025-09-25T16:19:51.231646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:51.231650Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:2 2025-09-25T16:19:51.231653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 111:2 2025-09-25T16:19:51.231660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:19:51.232071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.232080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [3:642:2589] TestWaitNotification: OK eventTxId 111 TestModificationResults wait txId: 114 2025-09-25T16:19:51.232754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRestore Restore { TableName: "IndexedTable" S3Settings { Endpoint: "localhost" Scheme: HTTP } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:51.232789Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TRestore Propose, path: /MyRoot/IndexedTable, opId: 114:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.232805Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: Check failed: path: '/MyRoot/IndexedTable', error: path has indexes, request doesn't 
accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612, at schemeshard: 72057594046678944 2025-09-25T16:19:51.233244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "Check failed: path: \'/MyRoot/IndexedTable\', error: path has indexes, request doesn\'t accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:51.233286Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Check failed: path: '/MyRoot/IndexedTable', error: path has indexes, request doesn't accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612, operation: RESTORE TABLE, path: /MyRoot/IndexedTable TestModificationResult got TxId: 114, wait until txId: 114 >> TImportTests::ShouldSucceedOnIndexedTable3 [GOOD] >> TImportTests::ChangefeedsExportRestoreUnhappyPropose [GOOD] >> TImportTests::ShouldSucceedOnManyTables >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogCountByResource |82.3%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:50.363650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:50.363668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:50.363673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:50.363676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:50.363681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:50.363683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:50.363690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:50.363701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:50.363791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:50.363834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:50.375369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:50.375389Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:50.378664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:50.378739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:50.378773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.380430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.380505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.380600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.380637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.381053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.381091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.381276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.381283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.381297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.381302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.381307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.381329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.382733Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.401263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:50.401324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.401365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.401370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.401446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.401458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.402110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.402189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.402200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.402204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.402567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.402898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.402908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-09-25T16:19:50.402913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.403444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.403784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.403827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.403994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.404014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.404019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.404085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.404090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.404110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.404118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.404445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.404451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
range# 54-54 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8A52EBE2-A9D8-43E6-866B-BFC5E28B2737 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=54-54 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.536930Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.536935Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.536939Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 55-55 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D08E7BBA-AB58-49CB-917A-65327244F1DF amz-sdk-request: attempt=1 content-type: application/xml range: bytes=55-55 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.537327Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.537332Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.537336Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 56-56 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A5D5556C-27DE-48CF-AA9D-E690C5852C4D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=56-56 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.537641Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.537652Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.537660Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 57-57 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7D1E7133-F6DB-4E9E-AC15-7425DC92A1C8 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=57-57 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.538171Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: 
[Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.538179Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.538187Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 58-58 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 02DB0A58-67D6-42B1-95C3-A019039C6074 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=58-58 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.538723Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.538731Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.538739Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 59-59 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21714 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BD21376C-580B-4A03-ADBF-360F2AED197A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=59-59 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:51.539141Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-09-25T16:19:51.539149Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-09-25T16:19:51.539181Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-09-25T16:19:51.539872Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } } 2025-09-25T16:19:51.539887Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } 2025-09-25T16:19:51.539897Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 24, writtenRows# 3 2025-09-25T16:19:51.552297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:51.552319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:51.552342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:51.552354Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:51.552367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.552370Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.552374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:51.552380Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:51.552419Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.552815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.552874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.552883Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:51.552896Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.552900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.552903Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:51.552906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.552909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:51.552919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:51.552923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:51.552927Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:51.552931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:51.552956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:51.553402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.553418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 |82.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} >> TImportTests::ShouldSucceedOnManyTables [GOOD] >> TRestoreTests::ShouldSucceedOnMultiShardTable[Zstd] >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] >> TRestoreTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesStandardBatch >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:50.767046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:50.767064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:50.767068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:50.767072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:50.767076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:50.767079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:50.767086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-09-25T16:19:50.767096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:50.767187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:50.767230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:50.779721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:50.779745Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:50.784588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:50.784664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:50.784700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:50.786437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:50.786523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:50.786669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.786721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:50.787180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.787226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:50.787487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.787496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:50.787513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:50.787520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:50.787527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:50.787556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.789031Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:50.815097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:50.815165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.815220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:50.815228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:50.815319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:50.815337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:50.816096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.816145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:50.816196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.816204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:50.816207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:50.816211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:50.816653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.816663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:50.816667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:50.816950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:50.816958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-09-25T16:19:50.816962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.816967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:50.817442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:50.817846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:50.817907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:50.818123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:50.818149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:50.818159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.818246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:50.818254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:50.818282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:50.818293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:50.818739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:50.818748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
10760] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:19:52.410302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 348 RawX2: 12884904208 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.410328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:52.410356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 348 RawX2: 12884904208 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.410371Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 348 RawX2: 12884904208 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.410390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:52.410396Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.410402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:52.410409Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710759:0 129 -> 240 2025-09-25T16:19:52.410454Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:52.411149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.411193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.411201Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-09-25T16:19:52.411214Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 
progress is 1/1 2025-09-25T16:19:52.411218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-09-25T16:19:52.411221Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 progress is 1/1 2025-09-25T16:19:52.411224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-09-25T16:19:52.411228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-09-25T16:19:52.411244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710759 2025-09-25T16:19:52.411249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-09-25T16:19:52.411254Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:0 2025-09-25T16:19:52.411257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710759:0 2025-09-25T16:19:52.411284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:52.411806Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-09-25T16:19:52.411820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710759 2025-09-25T16:19:52.411832Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:52.411839Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-09-25T16:19:52.412291Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:52.423841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 367 RawX2: 12884904222 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.423863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710760, tablet: 72075186233409547, partId: 0 2025-09-25T16:19:52.423902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944, message: Source { RawX1: 367 RawX2: 12884904222 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.423915Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710760:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 367 RawX2: 12884904222 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { 
Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:52.423927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710760:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:52.423945Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.423949Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710760:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:19:52.423954Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710760:0 129 -> 240 2025-09-25T16:19:52.424004Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710760:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:52.424464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.424509Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.424517Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-09-25T16:19:52.424531Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-09-25T16:19:52.424534Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:52.424538Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-09-25T16:19:52.424540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:52.424543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-09-25T16:19:52.424555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710760 2025-09-25T16:19:52.424559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-09-25T16:19:52.424563Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710760:0 2025-09-25T16:19:52.424566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710760:0 2025-09-25T16:19:52.424590Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:19:52.425089Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-09-25T16:19:52.425108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710760 2025-09-25T16:19:52.425120Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:52.425125Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710760 2025-09-25T16:19:52.425490Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:52.425506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:52.425511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:292:2278] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedsExportRestoreUnhappyPropose [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:49.930832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:49.930851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.930856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:49.930860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:49.930864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:49.930866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:49.930873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:49.930884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:49.930976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:49.931018Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:49.941881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:49.941899Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:49.944944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:49.945015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:49.945047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:49.946975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:49.947035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:49.947139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.947198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:49.947759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.947806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:49.948077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.948090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:49.948111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:49.948119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:49.948127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:49.948161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.949712Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:49.973760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:49.973822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.973876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:49.973885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:49.973963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:49.973980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:49.974717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.974763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:49.974822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.974834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:49.974839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:49.974845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:49.975309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.975324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:49.975330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:49.975710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.975721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:49.975728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.975734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:49.976460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:49.976922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:49.976979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:49.977189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:49.977217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:49.977226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.977315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:49.977324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:49.977352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:49.977365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:49.977834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:49.977844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
rt] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: d41d8cd98f00b204e9800998ecf8427e ContentLength: 0 } } 2025-09-25T16:19:51.886076Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:281474976710763] HeadObject: key# /data_00.csv.sha256 REQUEST: HEAD /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:1829 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4D5FE15A-EA5B-4762-B097-F4F541C019F8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 2025-09-25T16:19:51.886581Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:681: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: b1e4e03f030176e093c2773f30899223 ContentLength: 76 } } 2025-09-25T16:19:51.886592Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710763] GetObject: key# /data_00.csv.sha256, range# 0-75 2025-09-25T16:19:51.886810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 REQUEST: GET /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:1829 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 31D4410F-2AA4-4F7B-9634-83E159D57A5B amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-75 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 2025-09-25T16:19:51.887154Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:695: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: b1e4e03f030176e093c2773f30899223 Body: 76b } 2025-09-25T16:19:51.887752Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-09-25T16:19:51.908965Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: d41d8cd98f00b204e9800998ecf8427e ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } } 2025-09-25T16:19:51.908999Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: d41d8cd98f00b204e9800998ecf8427e ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 
0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } 2025-09-25T16:19:51.909038Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 0, writtenRows# 0 2025-09-25T16:19:51.921973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1513 RawX2: 12884905174 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.921995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409554, partId: 0 2025-09-25T16:19:51.922019Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1513 RawX2: 12884905174 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.922030Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 1513 RawX2: 12884905174 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:19:51.922046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:9, shard: 72075186233409554, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.922051Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.922057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409554, at schemeshard: 72057594046678944 2025-09-25T16:19:51.922065Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:51.922144Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.922834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.922930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.922941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 
281474976710763:0 ProgressState 2025-09-25T16:19:51.922962Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:51.922967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:51.922973Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:51.922977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:51.922983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:51.923003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710763 2025-09-25T16:19:51.923011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:51.923017Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:51.923022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:19:51.923052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-09-25T16:19:51.923577Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:51.923593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:51.923607Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:51.923613Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:19:51.923648Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:640: TImport::TTxProgress: Allocate txId: info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' } 2025-09-25T16:19:51.924014Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:51.924042Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72057594046678944 2025-09-25T16:19:51.924050Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:51.924056Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1171: TImport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 106 2025-09-25T16:19:51.924066Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:604: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty 
maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' }, txId# 281474976710764 2025-09-25T16:19:51.924107Z node 3 :IMPORT NOTICE: schemeshard_import__create.cpp:764: TImport::TTxProgress: creation changefeed failed, cancelling, info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: Issue: 'Invalid changefeed format' } 2025-09-25T16:19:51.924529Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:51.924563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-09-25T16:19:51.924572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1448:3236] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 190 nonWorkingDomain# 0 56460 diskMask# 190 nonWorkingDomain# 1 4320 diskMask# 191 nonWorkingDomain# 0 20640 diskMask# 191 nonWorkingDomain# 1 48192 diskMask# 192 nonWorkingDomain# 0 496800 diskMask# 192 nonWorkingDomain# 1 161280 diskMask# 193 nonWorkingDomain# 0 194400 diskMask# 193 nonWorkingDomain# 1 23040 diskMask# 194 nonWorkingDomain# 0 194400 diskMask# 194 nonWorkingDomain# 1 23040 diskMask# 195 nonWorkingDomain# 0 87624 diskMask# 195 nonWorkingDomain# 1 54264 diskMask# 196 nonWorkingDomain# 0 102474 diskMask# 196 nonWorkingDomain# 1 161280 diskMask# 197 nonWorkingDomain# 0 74004 diskMask# 197 nonWorkingDomain# 1 23040 diskMask# 198 nonWorkingDomain# 0 74004 diskMask# 198 nonWorkingDomain# 1 23040 diskMask# 199 nonWorkingDomain# 0 51744 diskMask# 199 nonWorkingDomain# 1 54264 diskMask# 200 nonWorkingDomain# 0 383040 diskMask# 200 nonWorkingDomain# 1 23040 diskMask# 201 nonWorkingDomain# 0 191520 diskMask# 201 nonWorkingDomain# 1 8640 diskMask# 202 nonWorkingDomain# 0 128424 diskMask# 202 nonWorkingDomain# 1 69264 diskMask# 203 nonWorkingDomain# 0 91512 diskMask# 203 nonWorkingDomain# 1 60432 diskMask# 204 nonWorkingDomain# 0 78444 diskMask# 204 nonWorkingDomain# 1 23040 diskMask# 205 nonWorkingDomain# 0 69270 diskMask# 205 nonWorkingDomain# 1 8640 diskMask# 206 nonWorkingDomain# 0 63264 diskMask# 206 nonWorkingDomain# 1 69264 diskMask# 207 nonWorkingDomain# 0 43620 diskMask# 207 nonWorkingDomain# 1 60432 diskMask# 208 nonWorkingDomain# 0 383040 diskMask# 208 nonWorkingDomain# 1 23040 diskMask# 209 nonWorkingDomain# 0 128424 diskMask# 209 nonWorkingDomain# 1 69264 diskMask# 210 nonWorkingDomain# 0 191520 diskMask# 210 nonWorkingDomain# 1 8640 diskMask# 211 nonWorkingDomain# 0 91512 diskMask# 211 nonWorkingDomain# 1 60432 diskMask# 212 nonWorkingDomain# 0 78444 diskMask# 212 nonWorkingDomain# 1 23040 diskMask# 213 nonWorkingDomain# 0 63264 diskMask# 213 nonWorkingDomain# 1 69264 diskMask# 214 nonWorkingDomain# 0 69270 diskMask# 214 nonWorkingDomain# 1 8640 diskMask# 215 nonWorkingDomain# 0 43620 diskMask# 215 nonWorkingDomain# 1 60432 diskMask# 216 nonWorkingDomain# 0 170904 
diskMask# 216 nonWorkingDomain# 1 84264 diskMask# 217 nonWorkingDomain# 0 140712 diskMask# 217 nonWorkingDomain# 1 75432 diskMask# 218 nonWorkingDomain# 0 140712 diskMask# 218 nonWorkingDomain# 1 75432 diskMask# 219 nonWorkingDomain# 0 75096 diskMask# 219 nonWorkingDomain# 1 45816 diskMask# 220 nonWorkingDomain# 0 74784 diskMask# 220 nonWorkingDomain# 1 84264 diskMask# 221 nonWorkingDomain# 0 55140 diskMask# 221 nonWorkingDomain# 1 75432 diskMask# 222 nonWorkingDomain# 0 55140 diskMask# 222 nonWorkingDomain# 1 75432 diskMask# 223 nonWorkingDomain# 0 25890 diskMask# 223 nonWorkingDomain# 1 45816 diskMask# 224 nonWorkingDomain# 0 148794 diskMask# 224 nonWorkingDomain# 1 161280 diskMask# 225 nonWorkingDomain# 0 124764 diskMask# 225 nonWorkingDomain# 1 23040 diskMask# 226 nonWorkingDomain# 0 124764 diskMask# 226 nonWorkingDomain# 1 23040 diskMask# 227 nonWorkingDomain# 0 73344 diskMask# 227 nonWorkingDomain# 1 54264 diskMask# 228 nonWorkingDomain# 0 124764 diskMask# 228 nonWorkingDomain# 1 161280 diskMask# 229 nonWorkingDomain# 0 73344 diskMask# 229 nonWorkingDomain# 1 23040 diskMask# 230 nonWorkingDomain# 0 73344 diskMask# 230 nonWorkingDomain# 1 23040 diskMask# 231 nonWorkingDomain# 0 31656 diskMask# 231 nonWorkingDomain# 1 54264 diskMask# 232 nonWorkingDomain# 0 129324 diskMask# 232 nonWorkingDomain# 1 23040 diskMask# 233 nonWorkingDomain# 0 120750 diskMask# 233 nonWorkingDomain# 1 8640 diskMask# 234 nonWorkingDomain# 0 83184 diskMask# 234 nonWorkingDomain# 1 69264 diskMask# 235 nonWorkingDomain# 0 56460 diskMask# 235 nonWorkingDomain# 1 60432 diskMask# 236 nonWorkingDomain# 0 83184 diskMask# 236 nonWorkingDomain# 1 23040 diskMask# 237 nonWorkingDomain# 0 56460 diskMask# 237 nonWorkingDomain# 1 8640 diskMask# 238 nonWorkingDomain# 0 35496 diskMask# 238 nonWorkingDomain# 1 69264 diskMask# 239 nonWorkingDomain# 0 20640 diskMask# 239 nonWorkingDomain# 1 60432 diskMask# 240 nonWorkingDomain# 0 129324 diskMask# 240 nonWorkingDomain# 1 23040 diskMask# 241 nonWorkingDomain# 0 83184 diskMask# 241 nonWorkingDomain# 1 69264 diskMask# 242 nonWorkingDomain# 0 120750 diskMask# 242 nonWorkingDomain# 1 8640 diskMask# 243 nonWorkingDomain# 0 56460 diskMask# 243 nonWorkingDomain# 1 60432 diskMask# 244 nonWorkingDomain# 0 83184 diskMask# 244 nonWorkingDomain# 1 23040 diskMask# 245 nonWorkingDomain# 0 35496 diskMask# 245 nonWorkingDomain# 1 69264 diskMask# 246 nonWorkingDomain# 0 56460 diskMask# 246 nonWorkingDomain# 1 8640 diskMask# 247 nonWorkingDomain# 0 20640 diskMask# 247 nonWorkingDomain# 1 60432 diskMask# 248 nonWorkingDomain# 0 93024 diskMask# 248 nonWorkingDomain# 1 84264 diskMask# 249 nonWorkingDomain# 0 66300 diskMask# 249 nonWorkingDomain# 1 75432 diskMask# 250 nonWorkingDomain# 0 66300 diskMask# 250 nonWorkingDomain# 1 75432 diskMask# 251 nonWorkingDomain# 0 29970 diskMask# 251 nonWorkingDomain# 1 45816 diskMask# 252 nonWorkingDomain# 0 39096 diskMask# 252 nonWorkingDomain# 1 84264 diskMask# 253 nonWorkingDomain# 0 22680 diskMask# 253 nonWorkingDomain# 1 75432 diskMask# 254 nonWorkingDomain# 0 22680 diskMask# 254 nonWorkingDomain# 1 75432 diskMask# 255 nonWorkingDomain# 0 9972 diskMask# 255 nonWorkingDomain# 1 45816 diskMask# 256 nonWorkingDomain# 0 781920 diskMask# 257 nonWorkingDomain# 0 210240 diskMask# 257 nonWorkingDomain# 1 336960 diskMask# 258 nonWorkingDomain# 0 210240 diskMask# 258 nonWorkingDomain# 1 336960 diskMask# 259 nonWorkingDomain# 0 58074 diskMask# 259 nonWorkingDomain# 1 8640 diskMask# 260 nonWorkingDomain# 0 220320 diskMask# 261 nonWorkingDomain# 0 95040 diskMask# 261 
nonWorkingDomain# 1 336960 diskMask# 262 nonWorkingDomain# 0 95040 diskMask# 262 nonWorkingDomain# 1 336960 diskMask# 263 nonWorkingDomain# 0 25164 diskMask# 263 nonWorkingDomain# 1 8640 diskMask# 264 nonWorkingDomain# 0 496800 diskMask# 264 nonWorkingDomain# 1 336960 >> TRestoreTests::ExportImportWithDataCorruption[Raw] >> TRestoreTests::ExportImportWithMetadataCorruption >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Zstd] >> TImportTests::ShouldRestoreColumnFamilies >> TRestoreTests::ShouldSucceedOnMultipleFramesStandardBatch [GOOD] >> TImportTests::ShouldRestoreIndexTableSplitPoints >> TImportWithRebootsTests::CancelShouldSucceedOnSimpleTable >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch >> TRestoreTests::ExportImportWithMetadataCorruption [GOOD] >> TRestoreTests::ExportImportWithDataCorruption[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch [GOOD] >> TImportTests::ShouldRestoreColumnFamilies [GOOD] >> TRestoreTests::ExportImportWithDataCorruption[Zstd] >> TRestoreTests::ExportImportWithMetadataChecksumCorruption >> TImportTests::ShouldRestoreIncrementalBackupFlag >> TImportTests::ShouldRestoreIndexTableSplitPoints [GOOD] >> TImportTests::ShouldRestoreIndexTableUniformPartitionsCount >> TImportTests::ShouldRestoreIncrementalBackupFlag [GOOD] >> TImportTests::ShouldRestoreIncrementalBackupFlagNullAsFalse >> TRestoreTests::ExportImportWithMetadataChecksumCorruption [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch [GOOD] >> TRestoreTests::ExportImportWithDataCorruption[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:52.635741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:52.635761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:52.635765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:52.635769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:52.635773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:52.635776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:52.635782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:52.635793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:52.635875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:52.635916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:52.647152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:52.647170Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:52.651329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:52.651402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:52.651431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:52.654387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:52.654467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:52.654551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:52.654590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:52.655020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:52.655058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:52.655284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:52.655292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:52.655338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:52.655347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:52.655375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:52.655403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.656923Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:52.677549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:52.677601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.677643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:52.677650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:52.677724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:52.677737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:52.678290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:52.678323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:52.678371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.678380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:52.678385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:52.678390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:52.678735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.678744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:52.678752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:52.679056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.679064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:52.679070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:52.679076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:52.679735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:52.680161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:52.680221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:52.680436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:52.680459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:52.680464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:52.680534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:52.680539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:52.680561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:52.680572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:52.681067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:52.681077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:53.758735Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 28-34 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16487 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 86CEF6D0-9C98-4888-99BE-6DA5FD6583BB amz-sdk-request: attempt=1 content-type: application/xml range: bytes=28-34 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:53.759285Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:19:53.759291Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-09-25T16:19:53.759296Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 35-41 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16487 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D1EFA477-2127-4EC9-9FD0-16C1FA5A44AF amz-sdk-request: attempt=1 content-type: application/xml range: bytes=35-41 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:53.759884Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:19:53.759892Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-09-25T16:19:53.759915Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-09-25T16:19:53.760431Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } } 2025-09-25T16:19:53.760450Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } 2025-09-25T16:19:53.760461Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 42-48 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16487 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 845EF6BB-4D09-4D52-B2E9-213BBDC35162 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=42-48 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:53.761159Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: 
[Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:19:53.761175Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-09-25T16:19:53.761187Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 49-55 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16487 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A72F02FB-9FB7-4415-89F1-D93CD5F4E389 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=49-55 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:53.761910Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:19:53.761926Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-09-25T16:19:53.761942Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 56-59 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16487 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6711D9FE-3E06-4816-BDFA-99BD658070AA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=56-59 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:19:53.762462Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 4b } 2025-09-25T16:19:53.762476Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 4 2025-09-25T16:19:53.762508Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-09-25T16:19:53.763052Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } } 2025-09-25T16:19:53.763062Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } 2025-09-25T16:19:53.763069Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 24, writtenRows# 3 2025-09-25T16:19:53.775196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:53.775215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:53.775240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:53.775256Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:19:53.775271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.775277Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.775282Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:53.775288Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:53.775335Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.776046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.776130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.776139Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:53.776154Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:53.776159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:53.776164Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:53.776170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:53.776175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:53.776188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:53.776194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:53.776199Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:53.776203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:53.776226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:53.776684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:53.776695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 |82.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TRestoreTests::ExportImportWithMetadataChecksumAbsence >> TImportTests::ShouldRestoreIncrementalBackupFlagNullAsFalse [GOOD] >> TRestoreTests::ExportImportWithMetadataChecksumAbsence [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence [GOOD] >> TImportTests::ShouldRestoreIndexTableUniformPartitionsCount [GOOD] >> TImportTests::ShouldRestoreIndexTablePartitioningSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreIncrementalBackupFlagNullAsFalse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:53.687512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.687530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.687535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.687539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.687543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.687546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.687552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.687563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.687654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.687697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.700408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:53.700434Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.704771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.704878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.704918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.706625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.706678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.706771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.706826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.707255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.707289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.707488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.707495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.707506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.707510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.707514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.707535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.708591Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.725744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.725805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.725844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.725850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.725909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.725940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.726494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.726528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.726564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.726571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.726574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.726578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.726988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.727000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.727003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.727336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.727346Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.727352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.727358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.727925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.728283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.728326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.728480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.728501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:53.728506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.728567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:53.728572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.728597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:53.728609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:53.728980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.728986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:54.909760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:54.909851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:54.909861Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:54.909878Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:54.909883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:54.909889Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:54.909892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:54.909897Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:54.909913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:365:2342] message: TxId: 281474976720758 2025-09-25T16:19:54.909919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:54.909926Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:54.909931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:54.909956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-09-25T16:19:54.910522Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:54.910538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:54.910550Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:54.910556Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:54.911012Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:54.911034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:54.911041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [3:505:2455] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:54.913220Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:54.913279Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 72us result status StatusSuccess 2025-09-25T16:19:54.913382Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:54.913466Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:54.913527Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 60us result status StatusSuccess 2025-09-25T16:19:54.913732Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "null" } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |82.3%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TImportWithRebootsTests::CancelShouldSucceedOnSingleView >> TImportTests::ShouldRestorePartitioningByLoad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithMetadataChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:53.446226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.446245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.446250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.446254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.446263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.446266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.446273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.446284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.446381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.446439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.461409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:53.461430Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.465577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.465657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.465693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.467443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.467509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.467625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.468167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.468218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.468459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.468467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.468481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.468486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.468490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.468513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.469699Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.487260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.487336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.487379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.487386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.487449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.487461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.488081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.488168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.488180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.488184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.488524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.488799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.488814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.489416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.489858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.489891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.490042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.490131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:53.490136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.490159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:53.490167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:53.490534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000009 2025-09-25T16:19:55.189227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:55.189237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:19:55.189306Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:55.189313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-09-25T16:19:55.189405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.189416Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:55.189547Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:55.189563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:55.189568Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-09-25T16:19:55.189574Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-09-25T16:19:55.189580Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:55.189598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-09-25T16:19:55.190272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-09-25T16:19:55.201268Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:55.224895Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 
ChecksumState: DownloadState: } } 2025-09-25T16:19:55.224919Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:55.224933Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:8928 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F2C3A0A5-D794-40C2-B459-54C5C9216704 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:19:55.227205Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-09-25T16:19:55.227228Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-09-25T16:19:55.227265Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-09-25T16:19:55.228152Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:55.228165Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:55.228170Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-09-25T16:19:55.241258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 757 RawX2: 12884904587 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.241294Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:55.241328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 757 RawX2: 12884904587 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.241345Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 757 RawX2: 12884904587 } Origin: 
72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.241362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:55.241376Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.241382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:55.241389Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:55.241445Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:55.242117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.242217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.242228Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:19:55.242244Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:55.242249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.242255Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:55.242259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.242264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:55.242279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710763 2025-09-25T16:19:55.242287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.242293Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:55.242298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: 
RemoveTx for txid 281474976710763:0 2025-09-25T16:19:55.242346Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:55.242906Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:55.242925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:55.242937Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:55.242943Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:19:55.243579Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:55.243607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:55.243615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:724:2671] TestWaitNotification: OK eventTxId 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:53.450973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.450997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.451003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.451009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.451021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.451026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.451036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.451050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.451175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.451234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.463553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:53.463574Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.466756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.466839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.466872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.468382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.468450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.468547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.468595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.469098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.469135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.469347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.469356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.469373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.469380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.469386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.469411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.470582Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.487568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.487634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.487679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.487685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.487748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.487759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.488341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.488416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.488426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.488430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.488796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.488812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.489109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.489116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.489120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.489124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.489589Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.489881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.489915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.490042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.490129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:53.490134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.490157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:53.490165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:53.490499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.490505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000009 2025-09-25T16:19:55.268009Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:55.268018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-09-25T16:19:55.268079Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:55.268086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-09-25T16:19:55.268184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.268195Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:55.268301Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:55.268314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:55.268319Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-09-25T16:19:55.268324Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-09-25T16:19:55.268330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:55.268348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-09-25T16:19:55.269081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-09-25T16:19:55.279824Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:55.294212Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 
ChecksumState: DownloadState: } } 2025-09-25T16:19:55.294240Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:55.294272Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:62894 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 269A4CDF-28EA-463D-B1B6-460E060F0AB9 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:19:55.300298Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-09-25T16:19:55.300326Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-09-25T16:19:55.300372Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-09-25T16:19:55.301232Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:55.301248Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:55.301256Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-09-25T16:19:55.314427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 769 RawX2: 12884904595 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.314455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:55.314487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 769 RawX2: 12884904595 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.314502Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 769 RawX2: 12884904595 } Origin: 
72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:55.314525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:55.314530Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.314536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:55.314544Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:55.314612Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:55.315502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.315556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:55.315565Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:19:55.315597Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:55.315602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.315609Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:55.315612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.315618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:55.315633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:128:2153] message: TxId: 281474976710763 2025-09-25T16:19:55.315640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:55.315646Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:55.315652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: 
RemoveTx for txid 281474976710763:0 2025-09-25T16:19:55.315690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:55.316322Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:55.316339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:55.316350Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:55.316357Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:19:55.316961Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:55.316987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:55.316994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:733:2676] TestWaitNotification: OK eventTxId 104 >> TImportTests::CancelledImportEndTime >> TImportTests::ShouldRestoreIndexTablePartitioningSettings [GOOD] >> TRestoreTests::ExportImportWithPermissionsCorruption |82.3%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} >> TImportTests::ShouldRestorePartitioningByLoad [GOOD] >> TImportTests::ShouldRestoreMinMaxPartitionsCount ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:53.772696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.772711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.772715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.772718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.772722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.772724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.772730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.772739Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.772816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.772892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.787440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:53.787458Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.791206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.791283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.791330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.793122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.793171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.793245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.793282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.793729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.793771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.794009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.794019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.794052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.794060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.794067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.794097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.795479Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.811562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.811610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.811650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.811656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.811715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.811726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.812206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.812241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.812282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.812289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.812294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.812299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.812703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.812712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.812716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.812970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.812977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:53.812980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.812985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.813425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.813765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.813824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.813955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.813977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:53.813983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.814041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:53.814046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.814066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:53.814074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:53.814470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.814478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:361:2339], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976720760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976720761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 300, ApplyTxId: 281474976720762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976720763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:56.071383Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-09-25T16:19:56.071839Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976720759 Done 2025-09-25T16:19:56.071868Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976720759 Done TBuildInfo{ IndexBuildId: 281474976720759, Uid: 103-0-0, DomainPathId: [OwnerId: 72075186233409546, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409546, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:361:2339], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976720760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976720761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 300, ApplyTxId: 281474976720762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976720763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-09-25T16:19:56.071877Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976720759, subscribers count# 1 2025-09-25T16:19:56.071901Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720759 2025-09-25T16:19:56.071909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720759 2025-09-25T16:19:56.071918Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:56.071924Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720759 2025-09-25T16:19:56.072400Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:56.072421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:56.072427Z node 
3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:504:2454] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:56.073028Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:56.073103Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 85us result status StatusSuccess 2025-09-25T16:19:56.073221Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:56.073313Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table/ByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72075186233409546 2025-09-25T16:19:56.073375Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table/ByValue/indexImplTable" took 63us result status StatusSuccess 2025-09-25T16:19:56.073619Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table/ByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 281474976720761 CreateStep: 300 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1073741824 MinPartitionsCount: 2 MaxPartitionsCount: 3 SplitByLoadSettings { Enabled: true } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] >> TRestoreTests::ExportImportWithPermissionsCorruption [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumCorruption >> TImportTests::ShouldRestoreMinMaxPartitionsCount [GOOD] >> TImportTests::ShouldRestoreKeyBloomFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-09-25T16:19:34.985497Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062243390114885:2153];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:34.985562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:34.988970Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:34.989073Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062245986998366:2077];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:34.989656Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/000781/r3tmp/tmpR9A8Gu/pdisk_1.dat 2025-09-25T16:19:34.991054Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:35.027429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:35.031599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:35.047761Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26601, node 1 2025-09-25T16:19:35.055980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/endf/000781/r3tmp/yandexxkbKI6.tmp 2025-09-25T16:19:35.055989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/endf/000781/r3tmp/yandexxkbKI6.tmp 2025-09-25T16:19:35.059232Z INFO: TTestServer started on Port 11520 GrpcPort 26601 TClient is connected to server localhost:11520 PQClient connected to localhost:26601 === TenantModeEnabled() = 0 === Init PQ - start server on port 26601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:19:35.085563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/endf/000781/r3tmp/yandexxkbKI6.tmp 2025-09-25T16:19:35.085679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:19:35.086754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:35.086786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:35.088284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:35.093125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:35.093154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:35.094568Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:19:35.094862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:35.100964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-09-25T16:19:35.101036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.101089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-09-25T16:19:35.101099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-09-25T16:19:35.101145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:19:35.101162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:35.101987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:35.102045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-09-25T16:19:35.102099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.102112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-09-25T16:19:35.102115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-09-25T16:19:35.102124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715657:0 2 -> 3 2025-09-25T16:19:35.102689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.102703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-09-25T16:19:35.102707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715657:0 3 -> 128 2025-09-25T16:19:35.103179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.103201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.103205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.103226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-09-25T16:19:35.104373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:35.104896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-09-25T16:19:35.104932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 waiting... 
2025-09-25T16:19:35.105269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:35.105280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-09-25T16:19:35.105284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:35.105599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817175150, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:35.105633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1758817175150 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-09-25T16:19:35.105642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.105722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976715657:0 128 -> 240 2025-09-25T16:19:35.105733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.105769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] wa ... 
w grpc connection 2025-09-25T16:19:55.266368Z node 5 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:133: new session created cookie 1 2025-09-25T16:19:55.266513Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "cli" } } 2025-09-25T16:19:55.266564Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:929: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 read init: from# ipv6:[::1]:42306, request# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "cli" } } 2025-09-25T16:19:55.266654Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 auth for : cli 2025-09-25T16:19:55.266801Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 Handle describe topics response 2025-09-25T16:19:55.266821Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 auth is DEAD 2025-09-25T16:19:55.266836Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1046: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 auth ok: topics# 1, initDone# 0 2025-09-25T16:19:55.267101Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1217: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 register session: topic# rt3.dc1--topic1 2025-09-25T16:19:55.267175Z :INFO: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] [] Got InitResponse. ReadSessionId: shared/cli_5_1_6347937225044306541_v1 2025-09-25T16:19:55.267185Z :DEBUG: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:19:55.267278Z :DEBUG: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-09-25T16:19:55.267334Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1653: [72075186224037898][rt3.dc1--topic1] pipe [5:7554062336163547387:2559] connected; active server actors: 1 2025-09-25T16:19:55.267348Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1700: [72075186224037898][rt3.dc1--topic1] consumer "cli" register session for pipe [5:7554062336163547387:2559] session shared/cli_5_1_6347937225044306541_v1 2025-09-25T16:19:55.267357Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:637: [72075186224037898][rt3.dc1--topic1] consumer cli register readable partition 0 2025-09-25T16:19:55.267367Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:667: [72075186224037898][rt3.dc1--topic1] consumer cli family created family=1 (Status=Free, Partitions=[0]) 2025-09-25T16:19:55.267378Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:869: [72075186224037898][rt3.dc1--topic1] consumer cli register reading session ReadingSession "shared/cli_5_1_6347937225044306541_v1" (Sender=[5:7554062336163547384:2559], Pipe=[5:7554062336163547387:2559], Partitions=[], ActiveFamilyCount=0) 2025-09-25T16:19:55.267384Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1184: [72075186224037898][rt3.dc1--topic1] consumer cli rebalancing was scheduled 2025-09-25T16:19:55.267395Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: 
read_balancer__balancing.cpp:1256: [72075186224037898][rt3.dc1--topic1] consumer cli balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-09-25T16:19:55.267409Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1303: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_6347937225044306541_v1" (Sender=[5:7554062336163547384:2559], Pipe=[5:7554062336163547387:2559], Partitions=[], ActiveFamilyCount=0) 2025-09-25T16:19:55.267424Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:547: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_6347937225044306541_v1" sender [5:7554062336163547384:2559] lock partition 0 for ReadingSession "shared/cli_5_1_6347937225044306541_v1" (Sender=[5:7554062336163547384:2559], Pipe=[5:7554062336163547387:2559], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-09-25T16:19:55.267431Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1323: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-09-25T16:19:55.267440Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1400: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000038s 2025-09-25T16:19:55.267460Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-09-25T16:19:55.267681Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1335: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_6347937225044306541_v1" ClientId: "cli" PipeClient { RawX1: 7554062336163547387 RawX2: 4503621102209535 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-09-25T16:19:55.267734Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 got read request: guid# 73620d5-9e7a7cc4-b122be0c-55ced0dd 2025-09-25T16:19:55.267763Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1143: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-09-25T16:19:55.268001Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:983: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7554062336163547390:2562] 2025-09-25T16:19:55.268078Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/cli_5_1_6347937225044306541_v1:1 with generation 1 2025-09-25T16:19:55.269462Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 6 SizeLag: 280 WriteTimestampEstimateMS: 1758817195265 
ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-09-25T16:19:55.269476Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:694: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 2025-09-25T16:19:55.269496Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1434: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 sending to client partition status 2025-09-25T16:19:55.269811Z :INFO: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 10 2025-09-25T16:19:55.270047Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 10 } } 2025-09-25T16:19:55.270099Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:539: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 10, commitOffset# (empty maybe) 2025-09-25T16:19:55.270110Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1023: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 10 2025-09-25T16:19:55.270114Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 10 2025-09-25T16:19:56.266027Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2025-09-25T16:19:56.266058Z :INFO: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:19:56.266252Z :INFO: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] Closing read session. Close timeout: 0.000000s 2025-09-25T16:19:56.266272Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2025-09-25T16:19:56.266282Z :INFO: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:19:56.266307Z :NOTICE: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-09-25T16:19:56.266319Z :DEBUG: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] [] Abort session to cluster 2025-09-25T16:19:56.266539Z :NOTICE: [] [] [5815ffcf-7e6f53d1-652bc441-b08e556d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-09-25T16:19:56.267469Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/cli_5_1_6347937225044306541_v1 2025-09-25T16:19:56.267110Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 grpc read done: success# 0, data# { } 2025-09-25T16:19:56.267125Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 grpc read failed 2025-09-25T16:19:56.267133Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 grpc closed 2025-09-25T16:19:56.267158Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/cli session shared/cli_5_1_6347937225044306541_v1 is DEAD 2025-09-25T16:19:56.267504Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1665: [72075186224037898][rt3.dc1--topic1] pipe [5:7554062336163547387:2559] disconnected; active server actors: 1 2025-09-25T16:19:56.267508Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1674: [72075186224037898][rt3.dc1--topic1] pipe [5:7554062336163547387:2559] client cli disconnected session shared/cli_5_1_6347937225044306541_v1 |82.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} >> TImportTests::ShouldFailOnEmptyToken >> RemoteTopicReader::PassAwayOnCreatingReadSession >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TImportTests::ShouldRestoreKeyBloomFilter [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumCorruption [GOOD] >> TRestoreTests::ExportImportWithSchemeChecksumAbsence >> TRestoreTests::ShouldSucceedOnMultiShardTable[Raw] >> TImportTests::ShouldFailOnEmptyToken [GOOD] >> TImportTests::ShouldFailOnAbsentData >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Zstd] >> TImportWithRebootsTests::ShouldSucceedOnViewsAndTables >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-09-25T16:19:35.378548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062246278279823:2160];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:35.378569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:19:35.381770Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:35.382275Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062248599163622:2153];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:35.382319Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0004bd/r3tmp/tmpGiYQMB/pdisk_1.dat 2025-09-25T16:19:35.384366Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:19:35.408856Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:35.410261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:35.424855Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30227, node 1 2025-09-25T16:19:35.435866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/endf/0004bd/r3tmp/yandexugjyoM.tmp 2025-09-25T16:19:35.435880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/endf/0004bd/r3tmp/yandexugjyoM.tmp 2025-09-25T16:19:35.440092Z INFO: TTestServer started on Port 3751 GrpcPort 30227 2025-09-25T16:19:35.450107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/endf/0004bd/r3tmp/yandexugjyoM.tmp 2025-09-25T16:19:35.450258Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3751 PQClient connected to localhost:30227 === TenantModeEnabled() = 0 === Init PQ - start server on port 30227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:19:35.479901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:35.479933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:35.481540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:35.485602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:35.485651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:35.486699Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:19:35.486929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:19:35.495620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-09-25T16:19:35.495687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.495744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-09-25T16:19:35.495752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-09-25T16:19:35.495813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-09-25T16:19:35.495838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:35.496631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:35.496678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-09-25T16:19:35.496714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.496723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046644480 2025-09-25T16:19:35.496725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-09-25T16:19:35.496727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-09-25T16:19:35.497118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 3 -> 128 2025-09-25T16:19:35.497302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-09-25T16:19:35.497311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-09-25T16:19:35.497473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.497484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-09-25T16:19:35.498109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:35.498434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-09-25T16:19:35.498462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-09-25T16:19:35.498972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1758817175542, transactions count in step: 1, at schemeshard: 72057594046644480 2025-09-25T16:19:35.499001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 
1758817175542 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-09-25T16:19:35.499009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.499063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 128 -> 240 2025-09-25T16:19:35.499072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-09-25T16:19:35.499102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was ... partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7554062332474549206:2576] 2025-09-25T16:19:55.719236Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/cli_5_1_3656321100252111477_v1:1 with generation 1 2025-09-25T16:19:55.720916Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 SizeLag: 280 WriteTimestampEstimateMS: 1758817195716 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-09-25T16:19:55.720934Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:694: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2025-09-25T16:19:55.720960Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1434: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 sending to client partition status 2025-09-25T16:19:55.721236Z :INFO: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-09-25T16:19:55.721381Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-09-25T16:19:55.721420Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:539: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-09-25T16:19:55.721434Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1023: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-09-25T16:19:55.721439Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:969: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-09-25T16:19:55.721462Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2332: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 0, sizeLag# 280 2025-09-25T16:19:55.721470Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1TEvPartitionReady. Aval parts: 1 2025-09-25T16:19:55.721483Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2266: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 performing read request: guid# 708b5c7a-34da29f0-235813a-e33bc499, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2025-09-25T16:19:55.721512Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1395: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 708b5c7a-34da29f0-235813a-e33bc499 2025-09-25T16:19:55.722024Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1758817195616 CreateTimestampMS: 1758817195615 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1758817195621 CreateTimestampMS: 1758817195616 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1758817195621 CreateTimestampMS: 1758817195616 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 4 WriteTimestampMS: 1758817195621 CreateTimestampMS: 1758817195616 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551408 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-09-25T16:19:55.722071Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2025-09-25T16:19:55.722083Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:901: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 708b5c7a-34da29f0-235813a-e33bc499 has messages 1 2025-09-25T16:19:55.722123Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1940: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 read done: guid# 708b5c7a-34da29f0-235813a-e33bc499, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 412 2025-09-25T16:19:55.722139Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2102: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 response to read: guid# 708b5c7a-34da29f0-235813a-e33bc499 2025-09-25T16:19:55.722227Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2145: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 Process answer. Aval parts: 0 2025-09-25T16:19:55.722337Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] Got ReadResponse, serverBytesSize = 412, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428388 2025-09-25T16:19:55.722369Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428388 2025-09-25T16:19:55.722465Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2025-09-25T16:19:55.722477Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] Returning serverBytesSize = 412 to budget 2025-09-25T16:19:55.722483Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] In ContinueReadingDataImpl, ReadSizeBudget = 412, ReadSizeServerDelta = 52428388 2025-09-25T16:19:55.722569Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-09-25T16:19:55.722636Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-09-25T16:19:55.722649Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-09-25T16:19:55.722655Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-09-25T16:19:55.722660Z :DEBUG: [] Take Data. Partition 0. Read: {1, 2} (3-3) 2025-09-25T16:19:55.722645Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 grpc read done: success# 1, data# { read_request { bytes_size: 412 } } 2025-09-25T16:19:55.722671Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] The application data is transferred to the client. 
Number of messages 4, size 32 bytes 2025-09-25T16:19:55.722679Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] Returning serverBytesSize = 0 to budget 2025-09-25T16:19:55.722693Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 got read request: guid# 661fe9d3-f4d33471-43195e49-2d97fd78 2025-09-25T16:19:56.717158Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2025-09-25T16:19:56.717182Z :INFO: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:19:56.722825Z :INFO: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] Closing read session. Close timeout: 0.000000s 2025-09-25T16:19:56.722861Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2025-09-25T16:19:56.722874Z :INFO: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1006 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:19:56.722909Z :NOTICE: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-09-25T16:19:56.722921Z :DEBUG: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] [] Abort session to cluster 2025-09-25T16:19:56.723171Z :NOTICE: [] [] [31e407e4-ac7f1e2c-c3a577b3-b9bf6fc6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-09-25T16:19:56.723482Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 grpc read done: success# 0, data# { } 2025-09-25T16:19:56.723503Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 grpc read failed 2025-09-25T16:19:56.723512Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 grpc closed 2025-09-25T16:19:56.723537Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/cli session shared/cli_5_1_3656321100252111477_v1 is DEAD 2025-09-25T16:19:56.723913Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1665: [72075186224037898][rt3.dc1--topic1] pipe [5:7554062332474549203:2573] disconnected; active server actors: 1 2025-09-25T16:19:56.723923Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1674: [72075186224037898][rt3.dc1--topic1] pipe [5:7554062332474549203:2573] client cli disconnected session shared/cli_5_1_3656321100252111477_v1 2025-09-25T16:19:56.724889Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/cli_5_1_3656321100252111477_v1 2025-09-25T16:19:57.034404Z node 5 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1093: TxId: 281474976715697, task: 1, CA Id [5:7554062341064483928:2600]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-09-25T16:19:57.072769Z node 5 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1093: TxId: 281474976715697, task: 1, CA Id [5:7554062341064483928:2600]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreKeyBloomFilter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:56.145201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:56.145227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.145235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:56.145240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:56.145248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:56.145253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:56.145263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.145279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:56.145410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:56.145469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:56.163595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:56.163618Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:56.167974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:56.168059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:56.168100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:56.169925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:56.169984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:56.170090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.170135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:56.170590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.170628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:56.170844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.170853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.170871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:56.170877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:56.170882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:56.170909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.172076Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:56.196742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:56.196817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.196900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:56.196909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:56.197011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:56.197054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:56.197741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.197793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:56.197861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.197872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:56.197878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:56.197884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:56.198363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.198378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:56.198388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:56.198808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.198824Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.198831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.198840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:56.199714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:56.200155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:56.200216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:56.200451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.200479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:56.200488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.200581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:56.200591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.200628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:56.200644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:56.201147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.201160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:19:57.342825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:57.342944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-09-25T16:19:57.342953Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:57.342967Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:57.342970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:57.342974Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:57.342976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:57.342980Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:57.342993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:364:2340] message: TxId: 281474976720758 2025-09-25T16:19:57.342998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:57.343002Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:57.343006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:57.343026Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-09-25T16:19:57.343428Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:57.343442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:57.343451Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:57.343456Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:57.343773Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:57.343788Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:19:57.343793Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:499:2448] TestWaitNotification: OK eventTxId 103 2025-09-25T16:19:57.344473Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:19:57.344525Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 60us result status StatusSuccess 2025-09-25T16:19:57.344613Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:57.344681Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-09-25T16:19:57.344725Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 47us result status StatusSuccess 2025-09-25T16:19:57.344962Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } EnableFilterByKey: true } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TImportTests::ShouldFailOnAbsentData [GOOD] >> TImportTests::ShouldCheckQuotas >> TRestoreTests::ExportImportWithSchemeChecksumAbsence [GOOD] >> TRestoreTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TRestoreTests::ShouldRestoreSpecialFpValues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2025-09-25T16:19:57.444125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062343436280813:2143];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:19:57.444269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0004f1/r3tmp/tmp2Odd5Y/pdisk_1.dat 2025-09-25T16:19:57.509497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:19:57.513418Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:27424 2025-09-25T16:19:57.549215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:19:57.549253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:19:57.550579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9106, node 1 2025-09-25T16:19:57.569044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:19:57.569059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2025-09-25T16:19:57.569061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:19:57.569106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:19:57.607797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:19:57.609753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-09-25T16:19:57.609992Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7554062343436281304:2294] Handshake: worker# [1:7554062343436281302:2292] 2025-09-25T16:19:57.610065Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7554062343436281304:2294] Create read session: session# [1:7554062343436281305:2295] >> TRestoreTests::ShouldSucceedOnLargeData[Raw] >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Zstd] |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> TImportTests::ShouldCheckQuotas [GOOD] >> TRestoreTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] |82.3%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TRestoreTests::ShouldRestoreDefaultValuesFromSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithSchemeChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:56.432283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:56.432319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.432325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:56.432331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:56.432337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:56.432342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:56.432351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.432362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:56.432486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:56.432541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:56.445300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:56.445323Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:56.449932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:56.450027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:56.450063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:56.453706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:56.453793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:56.453924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't 
been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.454005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:56.454543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.454603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:56.454921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.454937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.454959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:56.454969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:56.454976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:56.455016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.456625Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:56.472927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:56.472986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.473033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:56.473041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:56.473139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:56.473152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:56.473752Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.473787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:56.473826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.473836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:56.473839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:56.473843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:56.474238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.474248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:56.474252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:56.474520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.474527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.474532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.474536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:56.475095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:56.475437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:56.475477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:56.475632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-09-25T16:19:56.475651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:56.475656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.475731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:56.475736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.475762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:56.475771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:56.476126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.476133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 72057594046678944 2025-09-25T16:19:58.304578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-09-25T16:19:58.304593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.304606Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:14699 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 966A9229-0824-464C-81D3-13356F42B408 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:19:58.306274Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:58.306374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:58.306382Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-09-25T16:19:58.306389Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-09-25T16:19:58.306395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:19:58.306420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-09-25T16:19:58.306575Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } 2025-09-25T16:19:58.307844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-09-25T16:19:58.339084Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-09-25T16:19:58.350134Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:58.350154Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:58.350178Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:14699 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 86A5E9CA-7C40-496D-8CDA-567F27F9F9BB amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:19:58.351095Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-09-25T16:19:58.351112Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-09-25T16:19:58.351147Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-09-25T16:19:58.351714Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:58.351729Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] 
[s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:58.351735Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-09-25T16:19:58.364378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:58.364399Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-09-25T16:19:58.364421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 761 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:58.364433Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 761 RawX2: 12884904589 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:19:58.364447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.364450Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.364454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-09-25T16:19:58.364460Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:19:58.364512Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:58.365027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.365125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 
2025-09-25T16:19:58.365136Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:19:58.365152Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:58.365156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:58.365162Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:19:58.365166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:58.365170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:19:58.365190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710763 2025-09-25T16:19:58.365197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:19:58.365203Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:19:58.365207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:19:58.365236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-09-25T16:19:58.365751Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:19:58.365782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:19:58.365792Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:58.365796Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:19:58.366215Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:58.366233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:19:58.366238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:728:2673] TestWaitNotification: OK eventTxId 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:19:57.466977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:57.467002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:57.467008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:57.467013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:57.467020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:57.467024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:57.467034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:57.467047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:57.467184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:57.467244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:57.485067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:57.485089Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:57.488948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:57.488979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:57.489013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:57.490235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:57.490292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:57.490395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.490462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:57.491392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:57.491444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:57.491746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-09-25T16:19:57.491772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:57.491801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:57.491810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:57.491817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:57.491840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.493199Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:19:57.518380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:57.518448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.518502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:57.518510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:57.518563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:57.518577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:57.519382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.519428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:57.519490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.519500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-09-25T16:19:57.519506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:57.519513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:57.519984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.519996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:57.520001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:57.520367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.520377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.520383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.520390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:57.521117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:57.521606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:57.521643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:57.521837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.521863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:57.521871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.521959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:57.521967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.522002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:57.522014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:57.522432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:57.522442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... meshard: 72057594046678944 2025-09-25T16:19:58.765863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:339:2315], at schemeshard: 72057594046678944, txId: 281474976720758, path id: 2 2025-09-25T16:19:58.765879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.765887Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976720758:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:58.766118Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-09-25T16:19:58.766132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976720758 2025-09-25T16:19:58.766137Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720758 2025-09-25T16:19:58.766143Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720758, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:58.766150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:58.766167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:1275 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4CDE118D-C124-4D19-87EE-AC83C8CB81F0 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:19:58.766585Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976720758] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { 
ETag: a3ed28bfb53c9214f635c51ed6b618c4 ContentLength: 14 } } 2025-09-25T16:19:58.767285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720758 2025-09-25T16:19:58.778293Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976720758] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976720758 2025-09-25T16:19:58.789001Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976720758] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:58.789019Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976720758] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:58.789032Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976720758] GetObject: key# /data_00.csv, range# 0-13 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:1275 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A270475C-7709-41A0-8D6C-596C1382F423 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:19:58.789968Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976720758] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a3ed28bfb53c9214f635c51ed6b618c4 Body: 14b } 2025-09-25T16:19:58.789984Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976720758] Content size: processed-bytes# 0, content-length# 14, body-size# 14 2025-09-25T16:19:58.790021Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976720758] Upload rows: count# 1, size# 34 2025-09-25T16:19:58.790566Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976720758] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:19:58.790583Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976720758] Process download info at 'UploadResponse': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:19:58.790591Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976720758] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:19:58.804266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 12884904250 } Origin: 72075186233409546 State: 2 TxId: 281474976720758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:58.804285Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976720758, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:58.804310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976720758:0, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 12884904250 } Origin: 72075186233409546 State: 2 TxId: 281474976720758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:58.804321Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976720758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 401 RawX2: 12884904250 } Origin: 72075186233409546 State: 2 TxId: 281474976720758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:19:58.804334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976720758:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.804337Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976720758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.804341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976720758:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:58.804347Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720758:0 129 -> 240 2025-09-25T16:19:58.804385Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:58.804817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.804952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.804963Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720758:0 ProgressState 2025-09-25T16:19:58.804979Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:58.804984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:58.804990Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-09-25T16:19:58.804993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 
ready parts: 1/1 2025-09-25T16:19:58.804999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-09-25T16:19:58.805017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:292:2279] message: TxId: 281474976720758 2025-09-25T16:19:58.805025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-09-25T16:19:58.805032Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-09-25T16:19:58.805037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720758:0 2025-09-25T16:19:58.805066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:58.805583Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-09-25T16:19:58.805607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976720758 2025-09-25T16:19:58.805620Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:19:58.805627Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-09-25T16:19:58.806134Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:19:58.806159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:19:58.806168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:361:2331] TestWaitNotification: OK eventTxId 101 >> TRestoreTests::ShouldRestoreSpecialFpValues [GOOD] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningMerge >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TImportTests::ShouldFailOnInvalidValue >> TRestoreTests::CancelUponProposeShouldSucceed[Raw] >> TRestoreTests::ShouldFailOnInvalidValue[Raw] >> TRestoreTests::ShouldRestoreDefaultValuesFromSequence [GOOD] >> TRestoreTests::ShouldRestoreSequence >> TRestoreTests::ExportImportWithChecksums[Raw] >> TImportWithRebootsTests::CancelShouldSucceedOnSimpleTable [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnIndexedTable >> TRestoreTests::ShouldFailOnInvalidValue[Raw] [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:19:57.869648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:57.869672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:57.869678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:57.869687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:57.869692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:57.869696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:57.869705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:57.869717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:57.869839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:57.869904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:57.886750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:57.886771Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:57.891198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:57.891234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:57.891273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:57.893110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:57.893191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:57.893322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.893416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:57.894678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:57.894740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:57.895104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:57.895117Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:57.895150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:57.895159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:57.895166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:57.895194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.896761Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:19:57.922635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:57.922709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.922770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:57.922778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:57.922886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:57.922904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:57.924709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.924764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:57.924852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.924865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:57.924871Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:57.924876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:57.925447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.925461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:57.925469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:57.925830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.925840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:57.925846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.925854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:57.926584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:57.927157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:57.927197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:57.927420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:57.927446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:57.927454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.927542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:57.927550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:57.927584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:57.927595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:57.928155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:57.928164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 16:19:59.203216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:19:59.203221Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:19:59.203227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:19:59.203235Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:19:59.203252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18175 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5EAB6471-F35C-461F-84A1-5960CD82FE50 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:19:59.203728Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 9fefc518a77e08ff2e1005d0369e6533 ContentLength: 317 } } 2025-09-25T16:19:59.204162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:19:59.229210Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:19:59.250416Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:19:59.250440Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 
0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:19:59.250460Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-127 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18175 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8E398225-9409-4FAB-82BE-BBAC2F8E5230 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-127 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:19:59.251267Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-09-25T16:19:59.251283Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-09-25T16:19:59.251300Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 128-255 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18175 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 92D9AA8F-2633-4994-A66B-F410062D68CD amz-sdk-request: attempt=1 content-type: application/xml range: bytes=128-255 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:19:59.253877Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-09-25T16:19:59.253895Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-09-25T16:19:59.253910Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 256-316 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18175 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01C2F29A-22F2-4C6A-95CD-92FB2A19A8DF amz-sdk-request: attempt=1 content-type: application/xml range: bytes=256-316 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:19:59.257835Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 61b } 2025-09-25T16:19:59.257854Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 61 2025-09-25T16:19:59.257989Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 100, size# 2900 2025-09-25T16:19:59.259635Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ChecksumState: DownloadState: } } 
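The exchange above walks /data_00.csv.zst (ContentLength 317) in fixed 128-byte windows: one ranged GET per window (0-127, 128-255, 256-316), each response carrying the same ETag as the download info, until ProcessedBytes reaches the content length. A minimal standalone sketch of the same windowing arithmetic follows; FetchRange is a hypothetical stand-in for the S3 call, not the actual import_s3.cpp code.

```cpp
// Standalone sketch (not YDB code): how a fixed-size window walks a 317-byte
// object in ranges 0-127, 128-255, 256-316, matching the GET requests above.
// The "fetch" is simulated from an in-memory buffer; names are hypothetical.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>

static constexpr size_t WindowSize = 128;  // bytes per ranged GET, as in the log

// Hypothetical stand-in for an S3 GetObject with a Range header.
std::string FetchRange(const std::string& object, size_t begin, size_t end) {
    return object.substr(begin, end - begin + 1);  // inclusive range, like bytes=begin-end
}

int main() {
    const std::string object(317, 'x');  // same ContentLength as data_00.csv.zst above
    std::string assembled;

    for (size_t processed = 0; processed < object.size();) {
        const size_t begin = processed;
        const size_t end = std::min(begin + WindowSize, object.size()) - 1;
        std::cout << "GET range# " << begin << "-" << end << "\n";
        assembled += FetchRange(object, begin, end);
        processed = end + 1;  // ProcessedBytes advances past the chunk just read
    }

    std::cout << "processed-bytes# " << assembled.size() << "\n";  // prints 317
}
```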
2025-09-25T16:19:59.259659Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ChecksumState: DownloadState: } 2025-09-25T16:19:59.259669Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 1092, writtenRows# 100 2025-09-25T16:19:59.284563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:19:59.284587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:19:59.284614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:19:59.284628Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:19:59.284643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.284648Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.284653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:19:59.284660Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:19:59.284705Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:59.285331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.285435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.285446Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:19:59.285464Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:59.285472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:59.285478Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:19:59.285482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:59.285487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:19:59.285504Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:19:59.285511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:19:59.285517Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:19:59.285522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:19:59.285548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:19:59.286070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:19:59.286083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 >> TRestoreTests::CancelUponProposeShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelUponProposeShouldSucceed[Zstd] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningMerge [GOOD] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit >> TImportTests::ShouldFailOnInvalidValue [GOOD] >> TImportTests::ShouldFailOnOutboundKey >> YdbOlapStore::LogCountByResource [GOOD] >> TRestoreTests::CancelUponProposeShouldSucceed[Zstd] [GOOD] >> TRestoreTests::CancelUponProposeResultShouldSucceed[Zstd] >> TRestoreTests::ExportImportWithChecksums[Raw] [GOOD] >> TRestoreTests::ExportImportWithChecksums[Zstd] >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnInvalidFrame >> TImportTests::ShouldFailOnOutboundKey [GOOD] >> TImportTests::ShouldFailOnNonUniqDestinationPaths >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> TRestoreTests::ShouldRestoreSequence [GOOD] >> TRestoreTests::ShouldRestoreSequenceWithOverflow >> TImportTests::ShouldFailOnNonUniqDestinationPaths [GOOD] |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> TRestoreTests::CancelUponProposeResultShouldSucceed[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnInvalidFrame [GOOD] |82.3%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |82.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TRestoreTests::ExportImportWithChecksums[Zstd] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Raw] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnInvalidFrame [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:19:59.932878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:59.932904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.932910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:59.932916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:59.932922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:59.932926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:59.932937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.932951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:59.933090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:59.933161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:59.950481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:59.950501Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:59.954434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:59.954469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:59.954506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-09-25T16:19:59.956713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:59.956786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:59.956941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.957027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:59.958002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.958055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:59.958360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.958372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.958401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:59.958410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:59.958416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:59.958441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.960721Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:19:59.984566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:59.984640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.984710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:59.984718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:59.984776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:59.984791Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:59.985563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.985609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:59.985677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.985688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:59.985693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:59.985699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:59.986191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.986205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:59.986214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:59.986584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.986595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.986601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.986607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:59.987258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:59.987687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:59.987734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:59.987973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.988001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:59.988008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.988116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:59.988124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.988155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:59.988168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:59.988620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.988630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
INFO: import_s3.cpp:483: [Import] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2025-09-25T16:20:00.999227Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:491: [Import] [s3:102] Restart: attempt# 0 2025-09-25T16:20:01.003022Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-09-25T16:20:01.003678Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:01.003688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:01.003764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:01.003771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:20:01.003873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.003882Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:01.004015Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:01.004028Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:01.004033Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:01.004040Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:20:01.004047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:20:01.004066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:20:01.004768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:4475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0D8D1DD9-1C96-4749-A011-028F76D23749 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 
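Before any ranged GET, the restore task probes the object with HEAD to learn its ETag and ContentLength (HeadObject: key# /data_00.csv here; the entries that follow show the mock answering with no body and the task retrying the .zst key). A hedged sketch of that probe-and-fallback, with hypothetical helpers rather than the real TImport actor API:

```cpp
// Standalone sketch (hypothetical helpers, not the real import_s3.cpp actor):
// probe the plain CSV key with HEAD, fall back to the .zst key, and keep the
// ETag/ContentLength that the later ranged GETs are checked against.
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>

struct THeadResult {
    std::string ETag;
    uint64_t ContentLength = 0;
};

// Stand-in for an S3 HeadObject call; here only the .zst key "exists",
// mirroring the mock responses in this test.
std::optional<THeadResult> HeadObject(const std::string& key) {
    if (key == "/data_00.csv.zst") {
        return THeadResult{"a5511826537e4249478b686b99281952", 13};
    }
    return std::nullopt;  // mirrors "No response body." for /data_00.csv
}

int main() {
    std::string key = "/data_00.csv";
    auto head = HeadObject(key);
    if (!head) {                       // plain key absent -> try compressed variant
        key += ".zst";
        head = HeadObject(key);
    }
    if (!head) {
        std::cerr << "object not found\n";
        return 1;
    }
    std::cout << "key# " << key << " ETag: " << head->ETag
              << " ContentLength: " << head->ContentLength << "\n";
}
```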
2025-09-25T16:20:01.005079Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2025-09-25T16:20:01.005095Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:102] HeadObject: key# /data_00.csv.zst REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BCBB8FCD-5C82-4934-B1A8-7C9A97F86196 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 13 2025-09-25T16:20:01.005728Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: a5511826537e4249478b686b99281952 ContentLength: 13 } } 2025-09-25T16:20:01.006208Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:20:01.029559Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a5511826537e4249478b686b99281952 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:01.029585Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: a5511826537e4249478b686b99281952 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:01.029604Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-12 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4475 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A16A316E-58C4-4438-9A95-F7FF0209A575 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-12 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 13 2025-09-25T16:20:01.031860Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a5511826537e4249478b686b99281952 Body: 13b } 2025-09-25T16:20:01.031885Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 13, body-size# 13 2025-09-25T16:20:01.031901Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 0, error# Cannot process data: Unknown frame descriptor, writtenBytes# 0, writtenRows# 0 2025-09-25T16:20:01.038126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.038154Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:01.038188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.038207Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 12884904187 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.038226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:01.038232Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.038239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:01.038246Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:20:01.038297Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:01.038934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.039032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.039041Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:20:01.039059Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:01.039064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.039071Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:01.039076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.039082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:20:01.039099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:339:2317] message: TxId: 102 2025-09-25T16:20:01.039107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.039114Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:20:01.039119Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:20:01.039149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:20:01.039603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.039616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:399:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldFailOnNonUniqDestinationPaths [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:59.862249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:59.862273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.862279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:59.862284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:59.862290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:59.862295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:59.862305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.862319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:59.862437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:59.862495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:59.879858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:59.879877Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:59.884930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:59.885024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:59.885059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:59.886537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:59.886601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:59.886720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.886770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:59.887212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.887257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:59.887526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.887537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.887557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:59.887565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:59.887573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:59.887601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.888899Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:59.913040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:59.913103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.913152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:59.913160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:59.913251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:59.913265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:59.913911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.913957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:59.914012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.914022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:59.914027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:59.914032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:59.914462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.914474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:59.914482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:59.914804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.914815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.914820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.914826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:59.915497Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:59.916298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:59.916344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:59.916530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.916555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:59.916562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.916655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:59.916664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.916689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:59.916700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:59.917124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.917132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:00.997626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:00.997672Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:00.997718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.997729Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:00.997735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:00.997741Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:00.998223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.998237Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:00.998244Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:00.998809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.998822Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.998829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.998836Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:00.998871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:00.999192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:00.999227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:00.999460Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:00.999484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904049 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:00.999492Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.999562Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:00.999570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.999599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:00.999613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:01.000024Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:01.000035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:01.000084Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:01.000090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:01.000166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.000173Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:20:01.000187Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:20:01.000192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:20:01.000198Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:20:01.000202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:20:01.000209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:20:01.000215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
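The publish/ack traffic around these operations (TTxPublishToSchemeBoard DescribePath, TEvUpdateAck with a path version, "Publication in-flight, count: N", then "Publication complete, notify & remove") follows a simple bookkeeping pattern: each transaction waits on a per-path expected version and completes once every path has been acked at or above it. The sketch below is a hedged model of that pattern only, not schemeshard code.

```cpp
// Hedged model (not schemeshard code) of the publish/ack bookkeeping visible
// in the log: a tx waits on per-path version acks and completes when none remain.
#include <cstdint>
#include <iostream>
#include <map>

using TPathId = uint64_t;
using TTxId = uint64_t;

// txId -> (pathId -> version still awaiting an ack)
std::map<TTxId, std::map<TPathId, uint64_t>> InFlight;

void StartPublication(TTxId txId, TPathId pathId, uint64_t version) {
    InFlight[txId][pathId] = version;
}

// Mirrors "AckPublish ... version: N" followed by either
// "Publication in-flight, count: K" or "Publication complete, notify & remove".
void AckPublish(TTxId txId, TPathId pathId, uint64_t version) {
    auto it = InFlight.find(txId);
    if (it == InFlight.end()) return;
    auto pathIt = it->second.find(pathId);
    if (pathIt != it->second.end() && version >= pathIt->second) {
        it->second.erase(pathIt);
    }
    if (it->second.empty()) {
        std::cout << "Publication complete, txId: " << txId << "\n";
        InFlight.erase(it);
    } else {
        std::cout << "Publication in-flight, count: " << it->second.size() << "\n";
    }
}

int main() {
    StartPublication(/*txId*/ 1, /*pathId*/ 1, /*version*/ 3);  // values as in the log
    AckPublish(1, 1, 3);  // -> "Publication complete, txId: 1"
}
```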
2025-09-25T16:20:01.000220Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:20:01.000225Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:20:01.000237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:01.000244Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:20:01.000249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:20:01.000357Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:20:01.000372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:20:01.000378Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:20:01.000384Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:20:01.000389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:01.000402Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:20:01.000998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:20:01.001089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:01.077712Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:274:2264] Bootstrap 2025-09-25T16:20:01.077982Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:274:2264] Become StateWork (SchemeCache [3:279:2269]) 2025-09-25T16:20:01.078064Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-09-25T16:20:01.078097Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 101 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:5472" scheme: HTTP items { source_prefix: "a" destination_path: "/MyRoot/Table" } items { source_prefix: "b" destination_path: "/MyRoot/Table" } } } 2025-09-25T16:20:01.078143Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# BAD_REQUEST, error# Duplicate destination_path: /MyRoot/Table 2025-09-25T16:20:01.078152Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 
101 2025-09-25T16:20:01.078193Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-09-25T16:20:01.078263Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:274:2264] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:01.085130Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-09-25T16:20:01.085203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:01.085210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:20:01.085279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:01.085302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.085308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:286:2276] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelUponProposeResultShouldSucceed[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:59.836882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:59.836909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.836915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:59.836921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:59.836927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:59.836931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:59.836941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.836957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:59.837093Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:59.837153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:59.866986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:59.867008Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:59.871141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:59.871225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:59.871261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:59.872626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:59.872689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:59.872794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.872861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:59.873317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.873360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:59.873618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.873627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.873645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:59.873653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:59.873660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:59.873692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.875063Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:59.899796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:19:59.899863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.899922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:59.899930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:59.900013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:59.900030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:59.900661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.900704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:59.900755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.900765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:59.900770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:59.900775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:59.901222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.901236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:59.901241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:59.901601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.901611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.901617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.901624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:59.902337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:59.902752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:59.902797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:59.902966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.902994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:59.903002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.903097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:59.903105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.903132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:59.903144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:59.903576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.903585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
:TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-09-25T16:20:01.013265Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:01.013291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:20:01.013541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.013555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-09-25T16:20:01.013564Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.013667Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:01.013682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:01.013688Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:01.013695Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:20:01.013704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:20:01.013723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:20:01.014417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 2025-09-25T16:20:01.014697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:20:01.016623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:01.016639Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:20:01.016659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:20:01.016662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:20:01.016754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:01.016761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-09-25T16:20:01.016766Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:01.016796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:20:01.016808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.016814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:434:2403] TestWaitNotification: OK eventTxId 103 2025-09-25T16:20:01.040286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6700: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 6896 } } 2025-09-25T16:20:01.040311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:01.040335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 6896 } } 2025-09-25T16:20:01.040348Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 6896 } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:20:01.040540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 312 RawX2: 12884904186 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.040546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 
2025-09-25T16:20:01.040559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 312 RawX2: 12884904186 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.040569Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 312 RawX2: 12884904186 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.040583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:01.040586Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.040590Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:01.040596Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-09-25T16:20:01.040634Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:01.041213Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.041273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.041293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.041299Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:20:01.041310Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:01.041314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.041318Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:01.041320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.041324Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2025-09-25T16:20:01.041335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:340:2318] message: TxId: 102 2025-09-25T16:20:01.041340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:01.041344Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:20:01.041347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:20:01.041373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:20:01.041768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.041780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:434:2403] TestWaitNotification: OK eventTxId 102 >> TRestoreTests::ShouldRestoreSequenceWithOverflow [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:58.574928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:58.574954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:58.574960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:58.574966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:58.574978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:58.574983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:58.574993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:58.575008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-09-25T16:19:58.575139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:58.575205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:58.589080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:58.589100Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:58.596150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:58.596234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:58.596267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:58.597755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:58.597814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:58.597924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.597973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:58.598512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:58.598553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:58.598790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:58.598799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:58.598817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:58.598827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:58.598833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:58.598864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.600337Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:58.623844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:58.623921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.623986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:58.623995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:58.624079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:58.624098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:58.625185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.625234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:58.625294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.625305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:58.625311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:58.625317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:58.625779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.625790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:58.625799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:58.626114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.626126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.626132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:58.626139Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:58.626868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:58.627291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:58.627359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:58.627588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.627614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:58.627623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:58.627718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:58.627728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:58.627762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:58.627777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:58.628273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:58.628284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
6678944, message: Source { RawX1: 1160 RawX2: 12884904935 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-09-25T16:20:01.478598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409552, partId: 0 2025-09-25T16:20:01.478628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1160 RawX2: 12884904935 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-09-25T16:20:01.478657Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 1160 RawX2: 12884904935 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-09-25T16:20:01.478671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:01.478677Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.478683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-09-25T16:20:01.478690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409553, at schemeshard: 72057594046678944 2025-09-25T16:20:01.478695Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:20:01.478736Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:01.479495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.479612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.479621Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:20:01.479637Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 
progress is 1/1 2025-09-25T16:20:01.479642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.479648Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:20:01.479655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.479660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:20:01.479686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:128:2153] message: TxId: 281474976710763 2025-09-25T16:20:01.479693Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.479699Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:20:01.479704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:20:01.479736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-09-25T16:20:01.480952Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:20:01.480973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:20:01.480984Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:01.480990Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:20:01.482867Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:01.482895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.482903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1085:2984] TestWaitNotification: OK eventTxId 105 2025-09-25T16:20:01.483135Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:01.483227Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored" took 100us result status StatusSuccess 2025-09-25T16:20:01.483501Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored" PathDescription { Self { Name: "Restored" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710762 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Restored" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 2 MaxPartitionsCount: 2 SplitByLoadSettings { Enabled: false } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 3 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\003\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Raw] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldRestoreSequenceWithOverflow [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:59.238453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:59.238480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.238487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:59.238493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:59.238504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:59.238510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:59.238520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:59.238535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:59.238668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:59.238733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:59.256288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:59.256315Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:59.260522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:59.260609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:59.260647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:59.262056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:59.262121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:59.262231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.262285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:59.262737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.262782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:59.263039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.263050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:59.263071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:59.263079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:59.263086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:59.263116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.264473Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:59.289373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:59.289436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.289483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:59.289491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:59.289573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:59.289588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:59.290247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.290283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:59.290332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.290341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:59.290346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:59.290352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:59.290784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.290795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:59.290800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:59.291209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.291222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:59.291227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.291231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:59.291778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:59.292189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:59.292249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:59.292438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:59.292465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:59.292471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.292539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:59.292546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:59.292575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:59.292586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:59.293019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:59.293026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
3] Finish: success# 1, error# , writtenBytes# 0, writtenRows# 0 2025-09-25T16:20:01.796522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 866 RawX2: 12884904681 } Origin: 72075186233409549 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.796548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409549, partId: 0 2025-09-25T16:20:01.796580Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 866 RawX2: 12884904681 } Origin: 72075186233409549 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.796613Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 866 RawX2: 12884904681 } Origin: 72075186233409549 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:01.796634Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:01.796642Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.796651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-09-25T16:20:01.796660Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-09-25T16:20:01.796707Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:01.797350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.797442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-09-25T16:20:01.797452Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-09-25T16:20:01.797470Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 
progress is 1/1 2025-09-25T16:20:01.797474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.797479Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-09-25T16:20:01.797482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.797487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-09-25T16:20:01.797505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:128:2153] message: TxId: 281474976710763 2025-09-25T16:20:01.797512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-09-25T16:20:01.797518Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-09-25T16:20:01.797523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710763:0 2025-09-25T16:20:01.797552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-09-25T16:20:01.797966Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-09-25T16:20:01.797979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710763 2025-09-25T16:20:01.797988Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:01.797994Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-09-25T16:20:01.798508Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:01.798530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:20:01.798538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:808:2744] TestWaitNotification: OK eventTxId 103 2025-09-25T16:20:01.798728Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:01.798800Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored" took 79us result status StatusSuccess 2025-09-25T16:20:01.799076Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored" PathDescription { Self { Name: "Restored" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710762 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Restored" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 1 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 8 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 2 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 
0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:41.884762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:41.884780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.884786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:41.884791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:41.884798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:41.884802Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:41.884812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.884842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:41.884947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:41.884991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:41.902607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:41.902630Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:41.902708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.905885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:41.905958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:41.905984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:41.907053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:41.907105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:41.907170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.907341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:41.908213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.908253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:41.908451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:41.908458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.908471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:41.908477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:41.908482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:41.908509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:41.909474Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.927440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:41.927514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.927569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:41.927578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:41.927631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:41.927648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:41.929090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.929149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:41.929191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.929202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:41.929209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:41.929214Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:41.929741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.929756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:41.929763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:41.930165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.930176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.930182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:41.930189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:41.930841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:41.931272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:41.931306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:41.931504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.931529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
ntToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:02.279530Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:02.279664Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 151us result status StatusSuccess 2025-09-25T16:20:02.279938Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] Test command err: 2025-09-25T16:18:21.075374Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554061929002421904:2080];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:21.075446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a9/r3tmp/tmpBioYgR/pdisk_1.dat 2025-09-25T16:18:21.131505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:21.148756Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8643, node 1 2025-09-25T16:18:21.169059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:21.169070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:21.169072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:21.169116Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:18:21.177464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:21.177496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:21.180477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:21.196201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... self_check_result: GOOD issue_log { id: "YELLOW-e9e2-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-1" reason: "YELLOW-e9e2-1231c6b1-2" reason: "YELLOW-e9e2-1231c6b1-3" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } 2025-09-25T16:18:21.319945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.096999Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7554061937499050236:2078];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.097022Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:23.130213Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2025-09-25T16:18:23.176403Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7554061939203679721:2161];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.176507Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0041a9/r3tmp/tmpMrKCeD/pdisk_1.dat 2025-09-25T16:18:23.180468Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7554061941087734594:2074];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:18:23.183873Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.185115Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:18:23.186942Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:18:23.262053Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.262082Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.262561Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.262569Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.264608Z node 4 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-09-25T16:18:23.264755Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.264924Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:18:23.265184Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:18:23.265202Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:18:23.266065Z node 4 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-09-25T16:18:23.266559Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16281, node 4 2025-09-25T16:18:23.286956Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:18:23.291447Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:18:23.291463Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:18:23.291465Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:18:23.291523Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:18:23.315463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:18:23.385428Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.419120Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:18:23.442488Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeSc ... 
8;ScanId=40;TxId=281474976715673;ScanGen=1;task_identifier=;fline=actor.cpp:99;event=TEvTaskProcessedResult; 2025-09-25T16:19:59.912769Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[47:7554062350090027505:3142];TabletId=72075186224037888;ScanId=40;TxId=281474976715673;ScanGen=1;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-09-25T16:19:59.912770Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=7;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912773Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[47:7554062350090027505:3142];TabletId=72075186224037888;ScanId=40;TxId=281474976715673;ScanGen=1;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=72075186224037888;prepared_source_id=55;fline=abstract.cpp:22;event=OnSourcePrepared;source_id=55;prepared=1; 2025-09-25T16:19:59.912775Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=8;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912781Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=9;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912788Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=10;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912788Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[47:7554062350090027505:3142];TabletId=72075186224037888;ScanId=40;TxId=281474976715673;ScanGen=1;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=100;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{1,721},{2,722},{3,723},{4,724},{5,725},{6,726},{7,727},{8,728},{9,729},{10,730},... 
(90 more)];}};]};SF:0;PR:0;); 2025-09-25T16:19:59.912790Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[47:7554062350090027505:3142];TabletId=72075186224037888;ScanId=40;TxId=281474976715673;ScanGen=1;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-09-25T16:19:59.912793Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=11;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912798Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=12;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912803Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=13;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912810Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=14;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912815Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=15;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.912877Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Batch read request: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 } { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 } 2025-09-25T16:19:59.912887Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:247;ask={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 }; 2025-09-25T16:19:59.912892Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Miss cache: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 } sender:[47:7554062350090028109:3346] 2025-09-25T16:19:59.912896Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Enqueue read range: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 } 2025-09-25T16:19:59.912900Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:247;ask={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 }; 2025-09-25T16:19:59.912904Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Miss cache: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 } sender:[47:7554062350090028109:3346] 2025-09-25T16:19:59.912909Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Enqueue read range: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 } 2025-09-25T16:19:59.912922Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Sending read from BlobCache: group: 2181038080 ranges: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 } { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 } cookie: 602 2025-09-25T16:19:59.913313Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:491;success=TEvGetResult {Status# OK ResponseSz# 2 {[72075186224037889:1:100:38:0:3928:0] OK Shift# 2392 Size# 352 RequestedSize# 352} {[72075186224037889:1:100:38:0:3928:0] OK Shift# 1368 Size# 392 RequestedSize# 392}}; 2025-09-25T16:19:59.913322Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:517;ProcessSingleRangeResult={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 }; 2025-09-25T16:19:59.913328Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:607;insert_cache={ Blob: 
DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 }; 2025-09-25T16:19:59.913336Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:547;ProcessSingleRangeResult={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 };send_replies=1; 2025-09-25T16:19:59.913342Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Send result: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 2392 Size: 352 } to: [47:7554062350090028109:3346] status: OK 2025-09-25T16:19:59.913349Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:517;ProcessSingleRangeResult={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 }; 2025-09-25T16:19:59.913352Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:607;insert_cache={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 }; 2025-09-25T16:19:59.913356Z node 47 :BLOB_CACHE DEBUG: log.cpp:841: fline=blob_cache.cpp:547;ProcessSingleRangeResult={ Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 };send_replies=1; 2025-09-25T16:19:59.913359Z node 47 :BLOB_CACHE DEBUG: ctor_logger.h:56: Send result: { Blob: DS:2181038080:[72075186224037889:1:100:38:0:3928:0] Offset: 1368 Size: 392 } to: [47:7554062350090028109:3346] status: OK 2025-09-25T16:19:59.913431Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=16;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913449Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=17;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913456Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=18;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913475Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=19;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913483Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=20;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913489Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=21;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913497Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=22;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913503Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=23;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913509Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=24;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913515Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=25;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913521Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=26;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913527Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=27;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913533Z node 47 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:841: source_id=28;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913538Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=29;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913544Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=30;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913549Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=31;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913555Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=32;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913560Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=33;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913566Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=34;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913573Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=35;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913578Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=36;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913585Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=37;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913591Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=38;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913596Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=39;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913602Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=40;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913607Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=41;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; 2025-09-25T16:19:59.913612Z node 47 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=42;tablet_id=72075186224037888;fline=script_cursor.cpp:21;event=empty_data;scan_step_idx=3; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithDataChecksumAbsence[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:20:00.206162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:00.206186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:00.206192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:00.206197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:00.206205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:00.206209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:00.206219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:00.206233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:00.206376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:00.206433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:00.222694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:00.222721Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:00.227492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:00.227528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:00.227563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:00.229609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:00.229683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:00.229806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:00.229894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:00.233234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:00.233308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:00.233661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:00.233675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:00.233707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:00.233716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:00.233722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:00.233751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.236236Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:20:00.258966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:00.259036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.259094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:00.259102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:00.259161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:00.259177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:00.261242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:00.261298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:00.261365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.261377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:00.261382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:00.261388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:00.261895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.261907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:00.261915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:00.262755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.262767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:00.262771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.262776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:00.263383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:00.263761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:00.263800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:00.263997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:00.264023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:00.264031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.264129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:00.264139Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:00.264171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:00.264183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:00.264643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:00.264650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 72057594046678944 2025-09-25T16:20:02.198635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 6 2025-09-25T16:20:02.198748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:20:02.198758Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:02.198909Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-09-25T16:20:02.198923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-09-25T16:20:02.198928Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-09-25T16:20:02.198934Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-09-25T16:20:02.198942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-09-25T16:20:02.198961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2025-09-25T16:20:02.200918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:23115 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 175D5A3A-B1D6-46C7-BC13-82D386C9E462 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 
x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:20:02.202098Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } 2025-09-25T16:20:02.202679Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710765 2025-09-25T16:20:02.213722Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:02.213750Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710765] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:02.213769Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:281474976710765] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:23115 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 34E37AB5-C30B-4B55-80E3-67526F81730D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-09-25T16:20:02.215076Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-09-25T16:20:02.215096Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:281474976710765] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-09-25T16:20:02.215137Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:281474976710765] Upload rows: count# 1, size# 36 2025-09-25T16:20:02.215995Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409549 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:20:02.216012Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:281474976710765] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:20:02.216020Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:281474976710765] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-09-25T16:20:02.233216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 911 RawX2: 12884904724 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true 
Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:20:02.233246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-09-25T16:20:02.233271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 911 RawX2: 12884904724 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:20:02.233282Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 911 RawX2: 12884904724 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-09-25T16:20:02.233297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:02.233300Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:20:02.233304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-09-25T16:20:02.233310Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710765:0 129 -> 240 2025-09-25T16:20:02.233352Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:02.234745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:20:02.234845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-09-25T16:20:02.234853Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-09-25T16:20:02.234868Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-09-25T16:20:02.234872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:20:02.234877Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-09-25T16:20:02.234879Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:20:02.234883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-09-25T16:20:02.234900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710765 2025-09-25T16:20:02.234905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-09-25T16:20:02.234909Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710765:0 2025-09-25T16:20:02.234914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710765:0 2025-09-25T16:20:02.234940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:20:02.241241Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-09-25T16:20:02.241272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710765 2025-09-25T16:20:02.241286Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:02.241291Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710765 2025-09-25T16:20:02.242923Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:02.242950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:20:02.242958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:877:2807] TestWaitNotification: OK eventTxId 104 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: 
[1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:36.661840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:36.661862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.661868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:36.661874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:36.661881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:36.661886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:36.661894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:36.661907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:36.662020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:36.662083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:36.686366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:36.686400Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:36.686505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:36.691264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:36.691377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:36.691416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:36.693012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:36.693108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:36.693208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.693428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:36.694564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.694629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:36.694883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:36.694896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:36.694918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:36.694926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:36.694933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:36.694974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:36.696520Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:36.720242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:36.720315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.720372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:36.720381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:36.720441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:36.720458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:36.721253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.721308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:36.721357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.721368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:36.721373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:36.721379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:36.721935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.721952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:36.721958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:36.722422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.722435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:36.722441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:36.722449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:36.723170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:36.723618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:36.723670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000001 2025-09-25T16:19:36.723886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:36.723910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:02.864700Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:837:2679] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:02.864737Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2679] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:02.864777Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:837:2679] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817202850751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817202850751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817202850751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:02.865569Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:837:2679] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:20:02.865593Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][30:789:2679] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:03.016565Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true 
ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:03.016672Z node 30 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 125us result status StatusSuccess 2025-09-25T16:20:03.016926Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 
6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:41.909394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:41.909417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.909423Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:41.909428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:41.909434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:41.909439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:41.909448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.909461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:41.909584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:41.909642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:41.932027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:41.932051Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:41.932125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.935325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:41.935394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:41.935420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:41.936404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:41.936453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:41.936520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.936677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:41.937487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.937524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:41.937688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:41.937695Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.937707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:41.937712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:41.937717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:41.937739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:41.938655Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.955451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:41.955525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.955570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:41.955575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:41.955618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:41.955635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:41.957364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.957429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:41.957481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.957494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:41.957500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:41.957506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:41.957999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.958012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:41.958018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:41.958414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.958424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.958429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:41.958435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:41.959149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:41.959649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:41.959703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:41.959920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.959948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
ompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 
MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:02.901116Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:780:2613] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:02.901159Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][24:718:2613] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:02.901205Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:780:2613] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817202887050 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817202887050 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817202887050 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:02.902316Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:780:2613] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:20:02.902338Z node 24 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][24:718:2613] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:03.069116Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:03.069238Z node 24 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 145us result status StatusSuccess 2025-09-25T16:20:03.069499Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Zstd] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] 
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:44.785038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:44.785054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:44.785057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:44.785061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:44.785065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:44.785068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:44.785075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:44.785086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:44.785172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:44.785211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:44.797042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:44.797060Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:44.800154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:44.800236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:44.800269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:44.802349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:44.802415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:44.802511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.802556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:44.803104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:44.803161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:44.803455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:44.803465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:44.803480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:44.803486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:44.803491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:44.803518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.805101Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:44.830420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:44.830498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.830563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:44.830573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:44.830656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-09-25T16:19:44.830674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:44.831508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.831563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:44.831618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.831630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:44.831636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:44.831642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:44.832160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.832173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:44.832179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:44.832637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.832648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:44.832654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:44.832662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:44.833501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:44.833931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:44.833986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:44.834206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:44.834235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:44.834243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:44.834339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:44.834348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:44.834377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:44.834390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:44.834937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:44.834947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
andle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:04.973847Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:04.973853Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-09-25T16:20:04.973860Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-09-25T16:20:04.973867Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:04.973886Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:20:04.974642Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:04.974659Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 1003:0 ProgressState at tablet72057594046678944 2025-09-25T16:20:04.974666Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:04.974844Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:04.974893Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:04.974902Z node 68 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:04.974916Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv, range# 0-13 2025-09-25T16:20:04.975325Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28804 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1F941157-0346-4AB6-BB10-FFD9CE1B103C amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:20:04.980709Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6700: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } 2025-09-25T16:20:04.980742Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:04.980773Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } 2025-09-25T16:20:04.980790Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } 2025-09-25T16:20:04.981015Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 292057778453 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:04.981027Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:04.981046Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 292057778453 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:04.981060Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 339 RawX2: 292057778453 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:04.981077Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:04.981081Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at 
schemeshard: 72057594046678944 2025-09-25T16:20:04.981087Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:04.981093Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 133 -> 240 2025-09-25T16:20:04.981132Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:04.981776Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:04.981881Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:04.981962Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:04.981971Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:04.981987Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:04.981992Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:04.981998Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:04.982001Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:04.982006Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:04.982012Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:04.982018Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:04.982022Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:04.982047Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestWaitNotification wait txId: 1003 2025-09-25T16:20:04.982679Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:04.982690Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-09-25T16:20:04.982705Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-09-25T16:20:04.982709Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-09-25T16:20:04.982793Z node 68 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:04.982815Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:04.982821Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [68:488:2459] 2025-09-25T16:20:04.982847Z node 68 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-09-25T16:20:04.982859Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-09-25T16:20:04.982863Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [68:488:2459] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:37.170899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.170917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.170921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.170925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.170929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.170932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.170937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.170947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.171040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.171084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.186761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:37.186793Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.186873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.190199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.190269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.190293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.191573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.191631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with 
owners number: 0 2025-09-25T16:19:37.191707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.191905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.192742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.192775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.192950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.192958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.192969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.192974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.192979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.193009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:37.194046Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.208757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.208838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.208897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.208905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.208948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.208960Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.209576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.209616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.209650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.209657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.209661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.209665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.210004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.210011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.210015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.210288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.210295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.210299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.210304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.210789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.211123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.211162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.211361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.211379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... rceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 
KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:06.221082Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:06.221181Z node 26 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 117us result status StatusSuccess 2025-09-25T16:20:06.221424Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> TImportWithRebootsTests::CancelShouldSucceedOnIndexedTable [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnDependentView >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> TImportTests::AuditCompletedImport [GOOD] >> TImportTests::AuditCancelledImport >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Zstd] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Raw] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Zstd] >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Raw] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |82.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> YdbSdkSessionsPool1Session::GetSession/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> YdbSdkSessions::TestSessionPool >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TImportWithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnSingleChangefeed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/ut/ydb-core-client-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] 
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:34.264169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:34.264196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.264203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:34.264209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:34.264216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:34.264220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:34.264230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:34.264245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:34.264371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:34.264443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:34.289162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:34.289200Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:34.289308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.294055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:34.294165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:34.294217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:34.295731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:34.295805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: 
Clear TempDirsState with owners number: 0 2025-09-25T16:19:34.295914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.296201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:34.297330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.297383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:34.297654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:34.297668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:34.297688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:34.297696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:34.297703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:34.297745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:34.299201Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:34.323150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:34.323253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.323315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:34.323325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:34.323375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-09-25T16:19:34.323389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:34.324151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.324203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:34.324253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.324265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:34.324271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:34.324277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:34.324738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.324751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:34.324757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:34.325184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.325194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:34.325201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:34.325209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:34.325929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:34.326408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:34.326466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:34.326713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:34.326744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... pipe is reset, at schemeshard: 72057594046678944 2025-09-25T16:20:13.274838Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124996, Sender [133:660:2565], Recipient [133:129:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2025-09-25T16:20:13.274842Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-09-25T16:20:13.274847Z node 133 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-09-25T16:20:13.274856Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-09-25T16:20:13.274860Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [133:661:2566] 2025-09-25T16:20:13.274875Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [133:666:2571], Recipient [133:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:13.274879Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:13.274883Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 2025-09-25T16:20:13.274890Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-09-25T16:20:13.274894Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [133:661:2566] 2025-09-25T16:20:13.274909Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [133:667:2572], Recipient [133:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:13.274914Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:13.274917Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1004 2025-09-25T16:20:13.274988Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [133:668:2573], Recipient [133:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:20:13.274993Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:20:13.275007Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:13.275072Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 63us result status StatusSuccess 2025-09-25T16:20:13.275188Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:13.275312Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [133:669:2574], Recipient [133:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:20:13.275318Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:20:13.275328Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:13.275352Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 24us result status StatusSuccess 2025-09-25T16:20:13.275441Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:13.275553Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [133:670:2575], Recipient [133:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:20:13.275558Z node 133 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:20:13.275568Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:13.275587Z node 133 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 20us 
result status StatusSuccess 2025-09-25T16:20:13.275642Z node 133 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbSdkSessions::TestSessionPool [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TImportTests::CompletedImportEndTime [GOOD] >> TImportTests::CorruptedPermissions >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> TImportTests::CorruptedPermissions [GOOD] >> TImportTests::CorruptedTopicImport >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> TImportTests::CorruptedTopicImport [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:40.594499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.594521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.594527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.594531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.594537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.594542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.594551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.594564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.594671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.594730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.615946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:40.615981Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.616087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.620177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.620276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.620321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.621577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.621640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.621725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.621904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.622849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.622894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.623119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.623127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.623142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.623148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.623154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.623186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:40.624358Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.640170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.640226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.640269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.640275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.640321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.640333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.640986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.641057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.641066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.641069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.641431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.641737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.641748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.641753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.642187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.642515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.642555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.642700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.642720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
thId: 5] was 3 2025-09-25T16:20:14.731576Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-09-25T16:20:14.732059Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:14.732070Z node 112 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:14.732119Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:14.732145Z node 112 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 3/3 2025-09-25T16:20:14.732150Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:20:14.732155Z node 112 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 3/3 2025-09-25T16:20:14.732158Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:20:14.732162Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-09-25T16:20:14.732167Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-09-25T16:20:14.732174Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:14.732179Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:14.732200Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:14.732210Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:1 2025-09-25T16:20:14.732214Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:1 2025-09-25T16:20:14.732220Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:20:14.732224Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:2 2025-09-25T16:20:14.732227Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:2 2025-09-25T16:20:14.732235Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-09-25T16:20:14.732382Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.732538Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.732659Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.732667Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.732708Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.733626Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:14.734119Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 481036339486 } TabletId: 72075186233409546 State: 4 2025-09-25T16:20:14.734139Z node 112 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-09-25T16:20:14.734552Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:20:14.734575Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:20:14.734658Z node 112 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-09-25T16:20:14.734692Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-09-25T16:20:14.734747Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:20:14.734809Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:20:14.734815Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:20:14.734827Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-09-25T16:20:14.734835Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-09-25T16:20:14.734841Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] 
was 2 Forgetting tablet 72075186233409546 2025-09-25T16:20:14.736646Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-09-25T16:20:14.736662Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-09-25T16:20:14.736775Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-09-25T16:20:14.736861Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:14.736869Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-09-25T16:20:14.737013Z node 112 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:14.737031Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:14.737037Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [112:637:2562] 2025-09-25T16:20:14.738131Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 354 RawX2: 481036339488 } TabletId: 72075186233409547 State: 4 2025-09-25T16:20:14.738149Z node 112 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-09-25T16:20:14.738460Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-09-25T16:20:14.738477Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-09-25T16:20:14.738548Z node 112 :HIVE INFO: tablet_helpers.cpp:1356: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-09-25T16:20:14.738577Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:14.738635Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:20:14.738695Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:20:14.738700Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:20:14.738713Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409547 2025-09-25T16:20:14.740290Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-09-25T16:20:14.740304Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-09-25T16:20:14.740367Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-09-25T16:20:14.740465Z node 112 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-09-25T16:20:14.740479Z node 112 :HIVE INFO: tablet_helpers.cpp:1504: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> YdbSdkSessionsPool::StressTestSync/1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::CorruptedTopicImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:46.092472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:46.092494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.092500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:46.092505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:46.092511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:46.092515Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:46.092523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.092537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:46.092654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:46.092706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:46.103859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:46.103878Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:46.106973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:46.107052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:46.107084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:46.109081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:46.109139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:46.109222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.109278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:46.109785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.109830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:46.110066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.110075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.110092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:46.110098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:46.110102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:46.110130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-09-25T16:19:46.111350Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:46.131183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:46.131247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.131317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:46.131325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:46.131410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:46.131425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:46.132058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:46.132143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:46.132159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:46.132164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:46.132574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:46.132916Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.132928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.132933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:46.133414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:46.133746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:46.133788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:46.133935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.133954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:46.133959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.134022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:46.134027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.134049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:46.134057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:46.134483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.134492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ly: status# SUCCESS, error# 2025-09-25T16:20:15.799891Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 101 Response { Entry { Id: 101 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:8986" scheme: HTTP items { source_prefix: "/Topic_0" destination_path: "/MyRoot/Restored/Topic_0" } } StartTime { } } } 2025-09-25T16:20:15.800074Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:273:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:15.800653Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-09-25T16:20:15.800694Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:15.800699Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:880: TImport::TTxProgress: Resume: id# 101, itemIdx# (empty maybe) 2025-09-25T16:20:15.800713Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:906: TImport::TTxProgress: Resume: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' } 2025-09-25T16:20:15.800718Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:406: TImport::TTxProgress: Get scheme: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' } 2025-09-25T16:20:15.808729Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:15.812785Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-09-25T16:20:15.812866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:15.812875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:20:15.812962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:15.812970Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 101, at schemeshard: 72057594046678944 REQUEST: HEAD /Topic_0/metadata.json HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9C423865-A142-42FF-BF22-2C29D372721E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json / 14 2025-09-25T16:20:15.813689Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:334: HandleMetadata TEvExternalStorage::TEvHeadObjectResponse: self# [3:287:2152], result# HeadObjectResult { ETag: f0051ed82f23d747468723901781553a ContentLength: 14 } REQUEST: GET /Topic_0/metadata.json HTTP/1.1 HEADERS: Host: localhost:8986 
Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D7389FD5-AE9B-4FC7-8DC5-8E1FF3803623 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json / 14 2025-09-25T16:20:15.814479Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:433: HandleMetadata TEvExternalStorage::TEvGetObjectResponse: self# [3:287:2152], result# f0051ed82f23d747468723901781553a 2025-09-25T16:20:15.814492Z node 3 :IMPORT TRACE: schemeshard_import_getters.cpp:449: Trying to parse metadata: self# [3:287:2152], body# {"version": 0} REQUEST: HEAD /Topic_0/scheme.pb HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 65E14918-4DA7-4A06-A9EE-ACFEDD4AC8DE amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:20:15.820864Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:287:2152], result# No response body. REQUEST: HEAD /Topic_0/create_view.sql HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E1973BAC-3403-42D5-8464-BA785EB93BC5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:20:15.821429Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:287:2152], result# No response body. 
REQUEST: HEAD /Topic_0/create_topic.pb HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3DA3553F-9EC3-4351-ACED-9AB994D1AD2F amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb / 732 2025-09-25T16:20:15.821962Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:287:2152], result# HeadObjectResult { ETag: 456e940d62d8f48fce4930e371868262 ContentLength: 732 } REQUEST: GET /Topic_0/create_topic.pb HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7345F301-13D9-4CA3-A325-C0F53B6477E8 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-731 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb / 732 2025-09-25T16:20:15.822455Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:477: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [3:287:2152], result# 456e940d62d8f48fce4930e371868262 2025-09-25T16:20:15.822473Z node 3 :IMPORT TRACE: schemeshard_import_getters.cpp:495: Trying to parse scheme: self# [3:287:2152], itemIdx# 0, schemeKey# /Topic_0/create_topic.pb, body# partitioning_settings {\n min_active_partitions: 1\n max_active_partitions: 1\n auto_partitioning_settings {\n strategy: AUTO_PARTITIONING_STRATEGY_DISABLED\n partition_write_speed {\n stabilization_window {\n seconds: 300\n }\n up_utilization_percent: 80\n down_utilization_percent: 20\n }\n }\n}\nretention_period {\n seconds: -1\n}\nsupported_codecs {\n}\npartition_write_speed_bytes_per_second: 50000000\npartition_write_burst_bytes: 50000000\nconsumers {\n name: "Consumer_0"\n read_from {\n }\n attributes {\n key: "_service_type"\n value: "data-streams"\n }\n}\nconsumers {\n name: "Consumer_1"\n important: true\n read_from {\n }\n attributes {\n key: "_service_type"\n value: "data-streams"\n }\n}\n REQUEST: HEAD /Topic_0/permissions.pb HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54CA0DE6-7BCE-4929-9299-E8EEC6DD8AC2 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:20:15.828314Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:379: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [3:287:2152], result# No response body. 
REQUEST: GET /?prefix=%2FTopic_0%2F HTTP/1.1 HEADERS: Host: localhost:8986 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C46A1A23-C541-4187-86E0-42D0BBE6F5D2 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeList: /Topic_0/ 2025-09-25T16:20:15.838078Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:669: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [3:287:2152], result# ListObjectsResult { } 2025-09-25T16:20:15.838100Z node 3 :IMPORT INFO: schemeshard_import_getters.cpp:694: Reply: self# [3:287:2152], success# 1, error# 2025-09-25T16:20:15.838135Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:15.838141Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1000: TImport::TTxProgress: OnSchemeResult: id# 101, itemIdx# 0, success# 1 2025-09-25T16:20:15.838219Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:640: TImport::TTxProgress: Allocate txId: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' } 2025-09-25T16:20:15.841863Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:15.841904Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 101, at schemeshard: 72057594046678944 2025-09-25T16:20:15.841917Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:15.841924Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1171: TImport::TTxProgress: OnAllocateResult: txId# 281474976710757, id# 101 2025-09-25T16:20:15.841944Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:449: TImport::TTxProgress: CreateTopic propose: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' }, txId# 281474976710757 2025-09-25T16:20:15.842014Z node 3 :IMPORT NOTICE: schemeshard_import__create.cpp:764: TImport::TTxProgress: creation topic failed, cancelling, info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: 'retention_period must be not negative, provided seconds: -1 ' } 2025-09-25T16:20:15.842497Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:15.842538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:15.842546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:290:2278] TestWaitNotification: OK eventTxId 101 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Zstd] [GOOD] >> TRestoreWithRebootsTests::CancelShouldSucceed[Zstd] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Zstd] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Raw] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnMultipleFrames >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> TImportWithRebootsTests::CancelShouldSucceedOnSingleChangefeed [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex >> YdbSdkSessions::MultiThreadSync >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> YdbSdkSessionsPool::WaitQueue/1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> YdbSdkSessions::TestMultipleSessions >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> YdbSdkSessionsPool1Session::FailTest/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TImportTests::CancelledImportEndTime [GOOD] >> TImportTests::Changefeed |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> 
TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> TImportTests::Changefeed [GOOD] >> TImportTests::ChangefeedWithPartitioning >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:37.246068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.246086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.246091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.246096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.246103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.246106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.246114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.246127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.246221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.246282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.269446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:37.269474Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.269572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.273625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.273724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.273762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.275177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.275270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.275371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.275605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.277008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-09-25T16:19:37.277058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.277311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.277322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.277342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.277350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.277358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.277400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:37.278969Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.301952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.302030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.302093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.302132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.302146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.302855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.302952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.302961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.302967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.302972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.303418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.303804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.303821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.303828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.304474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.304882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.304922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.305127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.305152Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... epInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:25.042215Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1075:2862] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:25.042256Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1035:2862] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:25.042315Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1075:2862] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817225014122 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817225014122 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1758817225014122 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:25.044536Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1075:2862] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-09-25T16:20:25.044572Z node 34 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1035:2862] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:25.317246Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:25.317358Z node 34 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 128us result status StatusSuccess 2025-09-25T16:20:25.317607Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> TImportTests::ChangefeedWithPartitioning [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> TImportWithRebootsTests::ShouldSucceedOnSimpleTable [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnSingleChangefeed |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedWithPartitioning [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:56.225695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:56.225716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.225722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:56.225727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:56.225734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:56.225738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:56.225747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.225761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:56.225879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:56.225933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:56.239814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:56.239832Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:56.244469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:56.244554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:56.244584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:56.246327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:56.246383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState 
with owners number: 0 2025-09-25T16:19:56.246480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.246524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:56.247020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.247059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:56.247285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.247295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.247312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:56.247335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:56.247342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:56.247372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.248619Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:56.271476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:56.271538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.271585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:56.271592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:56.271667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:56.271682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:56.272421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.272460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:56.272505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.272514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:56.272520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:56.272525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:56.272957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.272970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:56.272977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:56.273360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.273372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.273378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.273384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:56.274084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:56.276455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:56.276504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:56.276674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.276702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:56.276709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.276791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:56.276799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.276845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:56.276858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:19:56.277372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.277382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 6710760 2025-09-25T16:20:26.401591Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710760 2025-09-25T16:20:26.401596Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710760, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-09-25T16:20:26.401602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-09-25T16:20:26.401623Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710760, subscribers: 1 2025-09-25T16:20:26.401628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:127:2152] 2025-09-25T16:20:26.402989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710760 2025-09-25T16:20:26.403165Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-09-25T16:20:26.403183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710760 2025-09-25T16:20:26.403198Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:26.403205Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: 
TImport::TTxProgress: OnNotifyResult: txId# 281474976710760 2025-09-25T16:20:26.404675Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-09-25T16:20:26.404727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:26.404736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:290:2278] TestWaitNotification: OK eventTxId 101 2025-09-25T16:20:26.405865Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:26.405933Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 93us result status StatusSuccess 2025-09-25T16:20:26.406144Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 3 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 
MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:26.406236Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/updates_feed1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:26.406269Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/updates_feed1" took 35us result status StatusSuccess 2025-09-25T16:20:26.406345Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/updates_feed1" PathDescription { Self { Name: "updates_feed1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } CdcStreamDescription { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 3 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:26.406381Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/updates_feed1/streamImpl" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:26.406394Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/updates_feed1/streamImpl" took 15us result status StatusSuccess 2025-09-25T16:20:26.406467Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/updates_feed1/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 2 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "updates_feed1" TopicPath: "/MyRoot/Table/updates_feed1/streamImpl" YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 3 ScaleThresholdSeconds: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "my_consumer" } } Partitions { PartitionId: 0 TabletId: 72075186233409547 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409547 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> TFlatTest::CopyTableAndRead >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> 
TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> TImportWithRebootsTests::CancelShouldSucceedOnDependentView [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:56.026823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:56.026850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.026856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:56.026862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:56.026869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:56.026874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:56.026884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:56.026898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:56.027035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:56.027109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:56.051741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:56.051776Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:56.051868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:56.055758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:56.055864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:56.055907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:56.057499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:56.057714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:56.057816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.057883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:56.059267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.059340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:56.059621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:56.059633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:56.059652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:56.059660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:56.059667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:56.059702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:56.061272Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:56.083531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:56.083598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.083650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:56.083660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:56.083717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:56.083758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:56.084415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.084462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:56.084517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.084527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:56.084532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:56.084536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:56.085041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.085055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:56.085060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:56.085496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.085511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:56.085516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:56.085520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:56.086068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:56.086470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:56.086519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:56.086727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:56.086748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1000, ready parts: 0/1, is published: true 2025-09-25T16:20:28.044773Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1000:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1000 msg type: 269090816 2025-09-25T16:20:28.044800Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1000, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1000 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1000 at step: 5000002 2025-09-25T16:20:28.044938Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:28.044959Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1000 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 296352745584 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:28.044966Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 1000:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.044992Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1000:0 128 -> 240 2025-09-25T16:20:28.045018Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:28.045026Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:28.045576Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.045600Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 FAKE_COORDINATOR: Erasing txId 1000 2025-09-25T16:20:28.045704Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:28.045710Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:28.045739Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:28.045753Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:28.045758Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:213:2214], at schemeshard: 72057594046678944, txId: 1000, path id: 1 
2025-09-25T16:20:28.045763Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:213:2214], at schemeshard: 72057594046678944, txId: 1000, path id: 2 2025-09-25T16:20:28.045823Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1000:0, at schemeshard: 72057594046678944 2025-09-25T16:20:28.045830Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1000:0 ProgressState 2025-09-25T16:20:28.045842Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-09-25T16:20:28.045846Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.045851Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-09-25T16:20:28.045855Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.045859Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1000, ready parts: 1/1, is published: false 2025-09-25T16:20:28.045864Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.045869Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1000:0 2025-09-25T16:20:28.045873Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1000:0 2025-09-25T16:20:28.045884Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:28.045890Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1000, publications: 2, subscribers: 0 2025-09-25T16:20:28.045894Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-09-25T16:20:28.045898Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-09-25T16:20:28.045996Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.046007Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.046012Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1000 2025-09-25T16:20:28.046019Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 
2025-09-25T16:20:28.046023Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:28.046296Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.046312Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.046317Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1000 2025-09-25T16:20:28.046322Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:20:28.046327Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:28.046340Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1000, subscribers: 0 2025-09-25T16:20:28.046767Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.047002Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 TestModificationResult got TxId: 1000, wait until txId: 1000 2025-09-25T16:20:28.047159Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-09-25T16:20:28.047183Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 1002 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:4621" scheme: HTTP items { source_prefix: "" destination_path: "/MyRoot/Table" } } } 2025-09-25T16:20:28.047247Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# SUCCESS, error# 2025-09-25T16:20:28.047269Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 1002 Response { Entry { Id: 1002 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:4621" scheme: HTTP items { source_prefix: "" destination_path: "/MyRoot/Table" } } StartTime { } } } 2025-09-25T16:20:28.047952Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-09-25T16:20:28.047988Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:28.047994Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:880: TImport::TTxProgress: Resume: id# 1002, itemIdx# (empty maybe) 2025-09-25T16:20:28.048011Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:906: TImport::TTxProgress: Resume: info# { Id: 1002 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Cancelled Issue: 'Cancelled 
manually' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' } 2025-09-25T16:20:28.048096Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete TestWaitNotification wait txId: 1002 2025-09-25T16:20:28.048139Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-09-25T16:20:28.048147Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-09-25T16:20:28.048199Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 1002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.048206Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:91: NotifyTxCompletion, import is ready to notify, txId: 1002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.048221Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-09-25T16:20:28.048225Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [69:320:2310] TestWaitNotification: OK eventTxId 1002 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportWithRebootsTests::CancelShouldSucceedOnDependentView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:53.704518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.704539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.704545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.704550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.704556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.704561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.704570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.704582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.704713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.704773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.722503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:53.722530Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.722625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.726663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.726748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.726790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.728119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.728187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.728286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.728507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.729506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.729552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.729789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.729799Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.729816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.729823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.729829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.729860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:53.731260Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.754365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.754425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.754478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.754485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.754532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.754568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.755161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.755201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.755259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.755269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.755278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.755284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.755732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.755743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.755749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.756120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.756130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.756136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.756142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.756883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.757305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.757360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.757568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.757593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
ep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1000 at step: 5000002 2025-09-25T16:20:28.515647Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:28.515685Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1000 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 296352745584 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:28.515695Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 1000:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.515729Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1000:0 128 -> 240 2025-09-25T16:20:28.515764Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:28.515774Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:28.515986Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.516037Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 FAKE_COORDINATOR: Erasing txId 1000 2025-09-25T16:20:28.516370Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:28.516377Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:28.516417Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:28.516433Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:28.516438Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:213:2214], at schemeshard: 72057594046678944, txId: 1000, path id: 1 2025-09-25T16:20:28.516444Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:213:2214], at schemeshard: 72057594046678944, txId: 1000, path id: 2 2025-09-25T16:20:28.516454Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1000:0, at schemeshard: 72057594046678944 2025-09-25T16:20:28.516462Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1000:0 ProgressState 2025-09-25T16:20:28.516484Z node 69 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-09-25T16:20:28.516489Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.516495Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-09-25T16:20:28.516499Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.516504Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1000, ready parts: 1/1, is published: false 2025-09-25T16:20:28.516511Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-09-25T16:20:28.516516Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1000:0 2025-09-25T16:20:28.516522Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1000:0 2025-09-25T16:20:28.516534Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:28.516541Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1000, publications: 2, subscribers: 0 2025-09-25T16:20:28.516547Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-09-25T16:20:28.516551Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-09-25T16:20:28.516787Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.516801Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.516807Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1000 2025-09-25T16:20:28.516813Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:20:28.516818Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:28.517087Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.517101Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.517106Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1000 2025-09-25T16:20:28.517111Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:20:28.517120Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:28.517132Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1000, subscribers: 0 2025-09-25T16:20:28.517649Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 2025-09-25T16:20:28.517910Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 TestModificationResult got TxId: 1000, wait until txId: 1000 2025-09-25T16:20:28.518105Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-09-25T16:20:28.518131Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 1002 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:16750" scheme: HTTP items { source_prefix: "DependentView" destination_path: "/MyRoot/DependentView" } items { source_prefix: "BaseView" destination_path: "/MyRoot/BaseView" } } } 2025-09-25T16:20:28.518208Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# SUCCESS, error# 2025-09-25T16:20:28.518249Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 1002 Response { Entry { Id: 1002 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:16750" scheme: HTTP items { source_prefix: "DependentView" destination_path: "/MyRoot/DependentView" } items { source_prefix: "BaseView" destination_path: "/MyRoot/BaseView" } } StartTime { } } } 2025-09-25T16:20:28.519041Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-09-25T16:20:28.519096Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-09-25T16:20:28.519103Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:880: TImport::TTxProgress: Resume: id# 1002, itemIdx# (empty maybe) 2025-09-25T16:20:28.519120Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:906: TImport::TTxProgress: Resume: info# { Id: 1002 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Cancelled Issue: 'Cancelled manually' Items: 2 }, item# { Idx: 0 DstPathName: '/MyRoot/DependentView' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: DependentView Issue: '' } 2025-09-25T16:20:28.519127Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:906: TImport::TTxProgress: Resume: info# { Id: 1002 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Cancelled Issue: 'Cancelled 
manually' Items: 2 }, item# { Idx: 1 DstPathName: '/MyRoot/BaseView' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: BaseView Issue: '' } 2025-09-25T16:20:28.519157Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete TestWaitNotification wait txId: 1002 2025-09-25T16:20:28.519208Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-09-25T16:20:28.519215Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-09-25T16:20:28.519288Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 1002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.519294Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:91: NotifyTxCompletion, import is ready to notify, txId: 1002, at schemeshard: 72057594046678944 2025-09-25T16:20:28.519312Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-09-25T16:20:28.519317Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [69:341:2331] TestWaitNotification: OK eventTxId 1002 |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |82.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TTransferTests::Create |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> YdbSdkSessionsPool::WaitQueue/0 >> TTransferTests::Create_Disabled >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> TTransferTests::Create [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> TTransferTests::CreateSequential >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-09-25T16:20:28.051021Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062476100829340:2259];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:28.051071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/endf/005b28/r3tmp/tmpoa960U/pdisk_1.dat 2025-09-25T16:20:28.108394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:28.108439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:28.109016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:28.112982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:28.130272Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:28.140165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7554062476100829088:2081] 1758817228047809 != 1758817228047812 TClient is connected to server localhost:20834 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:28.204237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:20:28.213398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:20:28.224390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-09-25T16:20:28.226812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) waiting... 
2025-09-25T16:20:28.310179Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-09-25T16:20:28.319329Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-09-25T16:20:28.336424Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-09-25T16:20:28.350797Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-09-25T16:20:28.409412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:28.427873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-09-25T16:20:28.427989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:343: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-09-25T16:20:28.428124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-09-25T16:20:28.428137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-09-25T16:20:28.428139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-09-25T16:20:28.428145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 4] source path: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-09-25T16:20:28.428153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-09-25T16:20:28.428156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-09-25T16:20:28.428203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-09-25T16:20:28.428230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046644480 2025-09-25T16:20:28.428518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-09-25T16:20:28.428526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-09-25T16:20:28.428790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-09-25T16:20:28.428863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-09-25T16:20:28.428919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-09-25T16:20:28.428923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-09-25T16:20:28.428969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-09-25T16:20:28.428984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-09-25T16:20:28.428987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554062476100829582:2233], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 2 2025-09-25T16:20:28.428990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7554062476100829582:2233], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 4 2025-09-25T16:20:28.428998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2025-09-25T16:20:28.429007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-09-25T16:20:28.429103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-09-25T16:20:28.429124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } 
ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-09-25T16:20:28.429534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-09-25T16:20:28.429556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-09-25T16:20:28.429568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTable ... 29.315569Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:20:29.315574Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3325: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-09-25T16:20:29.315583Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-09-25T16:20:29.315608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554062478889774166 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2025-09-25T16:20:29.315612Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:20:29.315668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:20:29.315670Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-09-25T16:20:29.315671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:20:29.315720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554062478889774164 RawX2: 4503608217307430 } TabletId: 72075186224037890 State: 4 2025-09-25T16:20:29.315723Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:20:29.315739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5938: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7554062478889774164 RawX2: 4503608217307430 } TabletId: 72075186224037890 State: 4 2025-09-25T16:20:29.315742Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-09-25T16:20:29.315779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, 
at schemeshard 72057594046644480 2025-09-25T16:20:29.315781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-09-25T16:20:29.315819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-09-25T16:20:29.315821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037889 was deleted 2025-09-25T16:20:29.315933Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-09-25T16:20:29.315942Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2975: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-09-25T16:20:29.315971Z node 2 :HIVE WARN: hive_impl.cpp:2003: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2025-09-25T16:20:29.316092Z node 2 :HIVE WARN: hive_impl.cpp:2003: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-09-25T16:20:29.316186Z node 2 :HIVE WARN: hive_impl.cpp:2003: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) 2025-09-25T16:20:29.317544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-09-25T16:20:29.317619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-09-25T16:20:29.317675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-09-25T16:20:29.317699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-09-25T16:20:29.317723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-09-25T16:20:29.317742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-09-25T16:20:29.317761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-09-25T16:20:29.317780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-09-25T16:20:29.317799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6353: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 
72057594046644480 2025-09-25T16:20:29.317817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-09-25T16:20:29.317819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-09-25T16:20:29.317828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-09-25T16:20:29.317833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-09-25T16:20:29.317837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-09-25T16:20:29.318120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-09-25T16:20:29.318127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-09-25T16:20:29.318141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-09-25T16:20:29.318144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-09-25T16:20:29.318148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-09-25T16:20:29.318149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-09-25T16:20:29.318153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-09-25T16:20:29.318156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-09-25T16:20:29.318160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-09-25T16:20:29.318170Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-09-25T16:20:29.318594Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-09-25T16:20:29.318616Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7554062478889774344:2647], serverId# [2:7554062478889774346:2649], sessionId# [0:0:0] 2025-09-25T16:20:29.318621Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-09-25T16:20:29.318625Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7554062478889773982:2400], serverId# [2:7554062478889773983:2401], sessionId# [0:0:0] 2025-09-25T16:20:29.318629Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037888 
reason = ReasonStop 2025-09-25T16:20:29.318632Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7554062478889773972:2393], serverId# [2:7554062478889773973:2394], sessionId# [0:0:0] 2025-09-25T16:20:29.318645Z node 2 :TX_DATASHARD INFO: datashard.cpp:197: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-09-25T16:20:29.318649Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3741: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7554062478889774345:2648], serverId# [2:7554062478889774347:2650], sessionId# [0:0:0] 2025-09-25T16:20:29.318781Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-09-25T16:20:29.318790Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-09-25T16:20:29.318792Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-09-25T16:20:29.318794Z node 2 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-09-25T16:20:29.318997Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037889 2025-09-25T16:20:29.319039Z node 2 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037889 2025-09-25T16:20:29.319604Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037888 2025-09-25T16:20:29.319618Z node 2 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037888 2025-09-25T16:20:29.319974Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037891 2025-09-25T16:20:29.319985Z node 2 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037891 2025-09-25T16:20:29.320369Z node 2 :TX_DATASHARD INFO: datashard.cpp:257: OnTabletDead: 72075186224037890 2025-09-25T16:20:29.320374Z node 2 :TX_DATASHARD INFO: datashard.cpp:1311: Change sender killed: at tablet: 72075186224037890 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:53.410531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.410556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.410562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.410568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.410574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.410578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.410588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.410601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.410723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.410789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.435092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:53.435119Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.435217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.438755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.438824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.438853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.440314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.440388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.440498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.440693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.441748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-09-25T16:19:53.441795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.442050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.442061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.442080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.442087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.442094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.442130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:53.443449Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.466350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.466403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.466452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.466458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.466499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.466524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.467046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.467121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.467131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.467134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.467457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.467699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.467710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.467714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.468281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.468681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.468724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.468913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.468932Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 32Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:29.009447Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:29.009455Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-09-25T16:20:29.009462Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-09-25T16:20:29.009469Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:29.009486Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2025-09-25T16:20:29.010192Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-09-25T16:20:29.010262Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:29.010271Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-09-25T16:20:29.010354Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:29.010360Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:20:29.010365Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1003, at schemeshard: 72057594046678944 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:23909 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9E22BF57-AE33-455F-BA1F-179554EBC978 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:20:29.013265Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-09-25T16:20:29.013286Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:1003] HeadObject: key# /data_00.csv.zst REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:23909 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 53CB85AC-E702-4E56-BBA1-982C05A9E15C amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:20:29.017813Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-09-25T16:20:29.017919Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:29.025198Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:29.025226Z node 110 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:29.025247Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:23909 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 187CFF12-C4D1-491E-B402-6B11B4C99261 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:20:29.031701Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-09-25T16:20:29.031725Z node 110 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:20:29.031775Z node 110 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 1, size# 34 2025-09-25T16:20:29.037196Z node 110 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:20:29.037220Z node 110 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:20:29.037230Z node 110 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 1, 
error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:20:29.044662Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 472446404885 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:29.044690Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:29.044719Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 472446404885 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:29.044736Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 339 RawX2: 472446404885 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:29.044754Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:29.044759Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:29.044765Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:29.044772Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:20:29.044818Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:29.045408Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:29.045504Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:29.045513Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:29.045528Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:29.045533Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 
1/1 2025-09-25T16:20:29.045538Z node 110 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:29.045542Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:29.045547Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:29.045561Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [110:417:2389] message: TxId: 1003 2025-09-25T16:20:29.045571Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:29.045577Z node 110 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:29.045581Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:29.045608Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:29.046403Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:29.046420Z node 110 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [110:456:2427] TestWaitNotification: OK eventTxId 1003 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> TestProgram::Like >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> TestProgram::Like [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> 
TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 >> YdbSdkSessionsPool1Session::CustomPlan/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> TTransferTests::CreateDropRecreate [GOOD] >> TestShred::SimpleTestForTables >> TTransferTests::ConsistencyLevel >> TExportToS3Tests::UidAsIdempotencyKey >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> TestShred::ManualLaunch3Cycles >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> ColumnBuildTest::BaseCase >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Zstd] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> HullReplWriteSst::Basic [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> 
SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> TExportToS3Tests::UserSID |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> TTransferTests::Alter [GOOD] >> TExportToS3Tests::UserSID [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> TExportToS3Tests::TopicsExport >> TestShred::ShredWithMerge >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:30.524536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:30.524568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:30.524575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:30.524580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:30.524587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:30.524592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:30.524602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:30.524618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:30.524764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:30.525140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:30.541914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:30.541948Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:30.547059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:30.547206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:30.547263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:30.549440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:30.549542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:30.549678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.549780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:30.550388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:30.550456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:30.550790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:30.550807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:30.550833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:30.550846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:30.550854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:30.550900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.552673Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:30.576871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:30.576974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.577037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:30.577044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: 
TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:30.577108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:30.577121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:30.577964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.578020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:30.578075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.578085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:30.578090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:30.578095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:30.578632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.578652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:30.578660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:30.579173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.579189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.579197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:30.579205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:30.580056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:30.580582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:30.580644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:30.580905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.580936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:30.580945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:30.581020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:30.581029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:30.581068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:30.581083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:30.581707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:30.581721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
nResult Origin: 72075186233409546 TxId: 101 2025-09-25T16:20:32.648939Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5268: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2025-09-25T16:20:32.648945Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6561: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-09-25T16:20:32.648952Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:32.648974Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-09-25T16:20:32.649003Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:20:32.649588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.649603Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:20:32.649609Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2025-09-25T16:20:32.649646Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435072, Sender [6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-09-25T16:20:32.649652Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-09-25T16:20:32.649661Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.649668Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:20:32.649684Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.649688Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:32.649692Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:32.649695Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:32.649698Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:32.649702Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-09-25T16:20:32.649716Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:344:2321] message: TxId: 101 2025-09-25T16:20:32.649721Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:32.649726Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:20:32.649729Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:20:32.649755Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:20:32.650225Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:20:32.650253Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [6:344:2321] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2025-09-25T16:20:32.650298Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:32.650305Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:345:2322] 2025-09-25T16:20:32.650373Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [6:347:2324], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:32.650380Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:32.650385Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-09-25T16:20:32.650571Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122432, Sender [6:390:2359], Recipient [6:128:2153]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2025-09-25T16:20:32.650602Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5236: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-09-25T16:20:32.651283Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:32.651330Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:357: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2025-09-25T16:20:32.651343Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2025-09-25T16:20:32.651391Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.653546Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 
72057594046678944 2025-09-25T16:20:32.653605Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-09-25T16:20:32.653611Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:20:32.653700Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:32.653707Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:20:32.653763Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [6:396:2365], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:32.653769Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:32.653773Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:32.653788Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124996, Sender [6:344:2321], Recipient [6:128:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-09-25T16:20:32.653792Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-09-25T16:20:32.653812Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:32.653834Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:32.653838Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:394:2363] 2025-09-25T16:20:32.653855Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [6:396:2365], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:32.653861Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:20:32.653864Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-09-25T16:20:32.653907Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [6:397:2366], Recipient [6:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:20:32.653911Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:20:32.653920Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:32.653951Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 29us result status StatusPathDoesNotExist 2025-09-25T16:20:32.653979Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> TExportToS3Tests::TopicsExport [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101232640 Size: 32981044} 749567 commit chunk# 2 {ChunkIdx: 2 Offset: 101191680 Size: 33026012} 750589 commit chunk# 3 {ChunkIdx: 3 Offset: 101220352 Size: 32996928} 749928 commit chunk# 4 {ChunkIdx: 4 Offset: 101236736 Size: 32976864} 749472 commit chunk# 5 {ChunkIdx: 5 Offset: 101208064 Size: 33007356} 750165 commit chunk# 6 {ChunkIdx: 6 Offset: 101228544 Size: 32985268} 749663 commit chunk# 7 {ChunkIdx: 7 Offset: 101220352 Size: 32993540} 749851 commit chunk# 8 {ChunkIdx: 8 Offset: 101216256 Size: 33000316} 750005 commit chunk# 9 {ChunkIdx: 9 Offset: 101228544 Size: 32989184} 749752 commit chunk# 10 {ChunkIdx: 10 Offset: 101240832 Size: 32975588} 749443 commit chunk# 11 {ChunkIdx: 11 Offset: 101228544 Size: 32987468} 749713 commit chunk# 12 {ChunkIdx: 12 Offset: 101216256 Size: 33001240} 750026 commit chunk# 13 {ChunkIdx: 13 Offset: 101253120 Size: 32961288} 749118 commit chunk# 14 {ChunkIdx: 14 Offset: 101212160 Size: 33005552} 750124 commit chunk# 15 {ChunkIdx: 15 Offset: 101208064 Size: 33009336} 750210 commit chunk# 16 {ChunkIdx: 16 Offset: 101208064 Size: 33008764} 750197 commit chunk# 17 {ChunkIdx: 17 Offset: 101224448 Size: 32992792} 749834 commit chunk# 18 {ChunkIdx: 18 Offset: 101216256 Size: 33000624} 750012 commit chunk# 19 {ChunkIdx: 19 Offset: 101220352 Size: 32996796} 749925 commit chunk# 20 {ChunkIdx: 20 Offset: 101228544 Size: 32989184} 749752 commit chunk# 21 {ChunkIdx: 21 Offset: 101265408 Size: 32951960} 748906 commit chunk# 22 {ChunkIdx: 22 Offset: 101212160 Size: 33005552} 750124 commit chunk# 23 {ChunkIdx: 23 Offset: 101232640 Size: 32982628} 749603 commit chunk# 
24 {ChunkIdx: 24 Offset: 101224448 Size: 32992528} 749828 commit chunk# 25 {ChunkIdx: 25 Offset: 101236736 Size: 32980956} 749565 commit chunk# 26 {ChunkIdx: 26 Offset: 101199872 Size: 33017256} 750390 commit chunk# 27 {ChunkIdx: 27 Offset: 101228544 Size: 32986412} 749689 commit chunk# 28 {ChunkIdx: 28 Offset: 101232640 Size: 32985048} 749658 commit chunk# 29 {ChunkIdx: 29 Offset: 101244928 Size: 32969516} 749305 commit chunk# 30 {ChunkIdx: 30 Offset: 101236736 Size: 32980956} 749565 commit chunk# 31 {ChunkIdx: 31 Offset: 101216256 Size: 32999524} 749987 commit chunk# 32 {ChunkIdx: 32 Offset: 101228544 Size: 32988964} 749747 commit chunk# 33 {ChunkIdx: 33 Offset: 101228544 Size: 32987952} 749724 commit chunk# 34 {ChunkIdx: 34 Offset: 101208064 Size: 33009336} 750210 commit chunk# 35 {ChunkIdx: 35 Offset: 101203968 Size: 33013736} 750310 commit chunk# 36 {ChunkIdx: 36 Offset: 101203968 Size: 33010612} 750239 commit chunk# 37 {ChunkIdx: 37 Offset: 101220352 Size: 32994816} 749880 commit chunk# 38 {ChunkIdx: 38 Offset: 101232640 Size: 32982408} 749598 commit chunk# 39 {ChunkIdx: 39 Offset: 101224448 Size: 32993276} 749845 commit chunk# 40 {ChunkIdx: 40 Offset: 101203968 Size: 33010700} 750241 commit chunk# 41 {ChunkIdx: 41 Offset: 101228544 Size: 32987336} 749710 commit chunk# 42 {ChunkIdx: 42 Offset: 101195776 Size: 33020908} 750473 commit chunk# 43 {ChunkIdx: 43 Offset: 101253120 Size: 32964588} 749193 commit chunk# 44 {ChunkIdx: 44 Offset: 101240832 Size: 32976864} 749472 commit chunk# 45 {ChunkIdx: 45 Offset: 101253120 Size: 32962960} 749156 commit chunk# 46 {ChunkIdx: 46 Offset: 101216256 Size: 32997720} 749946 commit chunk# 47 {ChunkIdx: 47 Offset: 101249024 Size: 32967272} 749254 commit chunk# 48 {ChunkIdx: 48 Offset: 101236736 Size: 32979460} 749531 commit chunk# 49 {ChunkIdx: 49 Offset: 101203968 Size: 33010656} 750240 commit chunk# 50 {ChunkIdx: 50 Offset: 101228544 Size: 32989184} 749752 commit chunk# 51 {ChunkIdx: 51 Offset: 101224448 Size: 32992880} 749836 commit chunk# 52 {ChunkIdx: 52 Offset: 101228544 Size: 32989184} 749752 commit chunk# 53 {ChunkIdx: 53 Offset: 101240832 Size: 32973388} 749393 commit chunk# 54 {ChunkIdx: 54 Offset: 101208064 Size: 33009644} 750217 commit chunk# 55 {ChunkIdx: 55 Offset: 101228544 Size: 32988040} 749726 commit chunk# 56 {ChunkIdx: 56 Offset: 101236736 Size: 32977920} 749496 commit chunk# 57 {ChunkIdx: 57 Offset: 101208064 Size: 33008280} 750186 commit chunk# 58 {ChunkIdx: 58 Offset: 101220352 Size: 32997368} 749938 commit chunk# 59 {ChunkIdx: 59 Offset: 101208064 Size: 33009644} 750217 commit chunk# 60 {ChunkIdx: 60 Offset: 101257216 Size: 32959616} 749080 commit chunk# 61 {ChunkIdx: 61 Offset: 101216256 Size: 32999304} 749982 commit chunk# 62 {ChunkIdx: 62 Offset: 101228544 Size: 32989184} 749752 commit chunk# 63 {ChunkIdx: 63 Offset: 101212160 Size: 33001636} 750035 commit chunk# 64 {ChunkIdx: 64 Offset: 101244928 Size: 32971936} 749360 commit chunk# 65 {ChunkIdx: 65 Offset: 101228544 Size: 32986852} 749699 commit chunk# 66 {ChunkIdx: 66 Offset: 101203968 Size: 33013736} 750310 commit chunk# 67 {ChunkIdx: 67 Offset: 101183488 Size: 33032700} 750741 commit chunk# 68 {ChunkIdx: 68 Offset: 101203968 Size: 33013736} 750310 commit chunk# 69 {ChunkIdx: 69 Offset: 101216256 Size: 32999348} 749983 commit chunk# 70 {ChunkIdx: 70 Offset: 101212160 Size: 33005156} 750115 commit chunk# 71 {ChunkIdx: 71 Offset: 101224448 Size: 32992088} 749818 commit chunk# 72 {ChunkIdx: 72 Offset: 101199872 Size: 33016200} 750366 commit chunk# 73 {ChunkIdx: 73 Offset: 
101232640 Size: 32983728} 749628 commit chunk# 74 {ChunkIdx: 74 Offset: 101203968 Size: 33010788} 750243 commit chunk# 75 {ChunkIdx: 75 Offset: 101232640 Size: 32985048} 749658 commit chunk# 76 {ChunkIdx: 76 Offset: 101220352 Size: 32996312} 749914 commit chunk# 77 {ChunkIdx: 77 Offset: 101265408 Size: 32949584} 748852 commit chunk# 78 {ChunkIdx: 78 Offset: 101208064 Size: 33007004} 750157 commit chunk# 79 {ChunkIdx: 79 Offset: 101240832 Size: 32976864} 749472 commit chunk# 80 {ChunkIdx: 80 Offset: 101244928 Size: 32972508} 749373 commit chunk# 81 {ChunkIdx: 81 Offset: 101224448 Size: 32991208} 749798 commit chunk# 82 {ChunkIdx: 82 Offset: 101244928 Size: 32972772} 749379 commit chunk# 83 {ChunkIdx: 83 Offset: 101232640 Size: 32982892} 749609 commit chunk# 84 {ChunkIdx: 84 Offset: 101216256 Size: 33001240} 750026 commit chunk# 85 {ChunkIdx: 85 Offset: 101224448 Size: 32992308} 749823 commit chunk# 86 {ChunkIdx: 86 Offset: 101216256 Size: 33001460} 750031 commit chunk# 87 {ChunkIdx: 87 Offset: 101212160 Size: 33002120} 750046 commit chunk# 88 {ChunkIdx: 88 Offset: 101228544 Size: 32987028} 749703 commit chunk# 89 {ChunkIdx: 89 Offset: 101249024 Size: 32967800} 749266 commit chunk# 90 {ChunkIdx: 90 Offset: 101236736 Size: 32980956} 749565 commit chunk# 91 {ChunkIdx: 91 Offset: 101244928 Size: 32970264} 749322 commit chunk# 92 {ChunkIdx: 92 Offset: 101232640 Size: 32984080} 749636 commit chunk# 93 {ChunkIdx: 93 Offset: 101240832 Size: 32976864} 749472 commit chunk# 94 {ChunkIdx: 94 Offset: 101220352 Size: 32997368} 749938 commit chunk# 95 {ChunkIdx: 95 Offset: 101232640 Size: 32984960} 749656 commit chunk# 96 {ChunkIdx: 96 Offset: 101224448 Size: 32989184} 749752 commit chunk# 97 {ChunkIdx: 97 Offset: 101232640 Size: 32983948} 749633 commit chunk# 98 {ChunkIdx: 98 Offset: 101236736 Size: 32979020} 749521 commit chunk# 99 {ChunkIdx: 99 Offset: 101232640 Size: 32985048} 749658 commit chunk# 100 {ChunkIdx: 100 Offset: 101224448 Size: 32992616} 749830 commit chunk# 101 {ChunkIdx: 101 Offset: 101179392 Size: 33037012} 750839 commit chunk# 102 {ChunkIdx: 102 Offset: 101228544 Size: 32986236} 749685 commit chunk# 103 {ChunkIdx: 103 Offset: 101216256 Size: 32998468} 749963 commit chunk# 104 {ChunkIdx: 104 Offset: 101203968 Size: 33010920} 750246 commit chunk# 105 {ChunkIdx: 105 Offset: 101224448 Size: 32993276} 749845 commit chunk# 106 {ChunkIdx: 106 Offset: 101228544 Size: 32987908} 749723 commit chunk# 107 {ChunkIdx: 107 Offset: 101212160 Size: 33005552} 750124 commit chunk# 108 {ChunkIdx: 108 Offset: 101224448 Size: 32993276} 749845 commit chunk# 109 {ChunkIdx: 109 Offset: 101244928 Size: 32970572} 749329 commit chunk# 110 {ChunkIdx: 110 Offset: 101224448 Size: 32992836} 749835 commit chunk# 111 {ChunkIdx: 111 Offset: 101228544 Size: 32989184} 749752 commit chunk# 112 {ChunkIdx: 112 Offset: 101261312 Size: 32955656} 748990 commit chunk# 113 {ChunkIdx: 113 Offset: 101224448 Size: 32990284} 749777 commit chunk# 114 {ChunkIdx: 114 Offset: 101236736 Size: 32979548} 749533 commit chunk# 115 {ChunkIdx: 115 Offset: 101224448 Size: 32993276} 749845 commit chunk# 116 {ChunkIdx: 116 Offset: 101224448 Size: 32993276} 749845 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is 
[1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:30.056378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:30.056412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:30.056418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:30.056424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:30.056431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:30.056436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:30.056446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:30.056460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:30.056611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:30.056709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:30.078128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:30.078162Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:30.087512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:30.087647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:30.087703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:30.089773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:30.089861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:30.089976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.090077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:30.090589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:30.090650Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:30.090986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:30.091001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:30.091027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:30.091040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:30.091050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:30.091094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.092966Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:30.115262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:30.115377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.115462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:30.115472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:30.115556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:30.115576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:30.118181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.118263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:30.118345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.118361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:30.118368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:30.118375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:30.119165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.119182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:30.119189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:30.119646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.119663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:30.119670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:30.119678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:30.120459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:30.120965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:30.121033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:30.121278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:30.121310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:30.121318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-09-25T16:20:30.121383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:30.121391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:30.121428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:30.121441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:30.121904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:30.121915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ivate::TEvProgressOperation 2025-09-25T16:20:32.784075Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.784082Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2025-09-25T16:20:32.784087Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.784095Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-09-25T16:20:32.784125Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:32.784265Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:575:2506] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-09-25T16:20:32.784373Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-09-25T16:20:32.784384Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2025-09-25T16:20:32.784529Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:173: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-09-25T16:20:32.784544Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-09-25T16:20:32.784556Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: 
rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2025-09-25T16:20:32.784712Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:20:32.784721Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-09-25T16:20:32.784747Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-09-25T16:20:32.785003Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269287424, Sender [6:138:2160], Recipient [6:262:2252] 2025-09-25T16:20:32.785012Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5245: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-09-25T16:20:32.785028Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.785048Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:32.785057Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-09-25T16:20:32.785088Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-09-25T16:20:32.785115Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.785128Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:32.785141Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:704: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2025-09-25T16:20:32.785327Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2025-09-25T16:20:32.785582Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:20:32.785592Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000005 first txId#104 countTxs#1 2025-09-25T16:20:32.785600Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000005 2025-09-25T16:20:32.785605Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2025-09-25T16:20:32.785646Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435072, Sender 
[6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-09-25T16:20:32.785654Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 104 2025-09-25T16:20:32.785674Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.785680Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:20:32.785739Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.785744Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-09-25T16:20:32.785827Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.785835Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-09-25T16:20:32.785850Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.785856Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:20:32.785861Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:20:32.785867Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-09-25T16:20:32.785870Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:20:32.785875Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-09-25T16:20:32.785880Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-09-25T16:20:32.785886Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-09-25T16:20:32.785891Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 104:0 2025-09-25T16:20:32.785919Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:32.785926Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-09-25T16:20:32.785931Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-09-25T16:20:32.786087Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 274137603, Sender [6:211:2212], Recipient 
[6:128:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 4 } 2025-09-25T16:20:32.786095Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-09-25T16:20:32.786109Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:20:32.786124Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-09-25T16:20:32.786129Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-09-25T16:20:32.786134Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-09-25T16:20:32.786139Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:32.786153Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-09-25T16:20:32.786157Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:20:32.786800Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:20:32.786896Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-09-25T16:20:32.786902Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 >> TExportToS3Tests::TopicsWithPermissionsExport >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> TestShred::SimpleTestForAllSupportedObjects |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:42.968155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.968173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.968177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.968181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.968185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.968188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.968195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.968206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.968296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.968344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.983729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:42.983756Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.983840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.987274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.987366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.987403Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.988857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.988937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.989051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.989257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.990276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.990317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.990523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.990530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.990544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.990549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.990554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.990581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:42.991690Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.007460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:43.007520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.007566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.007572Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.007614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.007639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.008200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.008238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.008286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.008293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.008297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.008301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.008731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.008746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.008752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.009173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.009182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.009186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.009191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:43.009827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.010329Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.010372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.010610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.010636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... bletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 350 RawX2: 670014900508 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.171585Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:32.171612Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 350 RawX2: 670014900508 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.171627Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 350 RawX2: 670014900508 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.171642Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.171681Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.172113Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 REQUEST: HEAD /data_01.csv HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA 
amz-sdk-invocation-id: 9294D306-3BF8-4D84-85E1-DA466EBFCC72 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-09-25T16:20:32.174190Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2025-09-25T16:20:32.174211Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:506: [Import] [s3:1003] HeadObject: key# /data_01.csv.zst REQUEST: HEAD /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0E883180-01EF-4D1F-8C60-1941F54D503F amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_01.csv.zst / 23 2025-09-25T16:20:32.181299Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 15111746c55662cf8bd4f6f5fd6129b9 ContentLength: 23 } } 2025-09-25T16:20:32.181442Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:32.182846Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 15111746c55662cf8bd4f6f5fd6129b9 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:32.182873Z node 156 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 15111746c55662cf8bd4f6f5fd6129b9 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:32.182896Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_01.csv.zst, range# 0-22 REQUEST: GET /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BE570F69-4B3C-4926-885A-36443E5179C1 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_01.csv.zst / 23 2025-09-25T16:20:32.188065Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 15111746c55662cf8bd4f6f5fd6129b9 Body: 23b } 2025-09-25T16:20:32.188099Z node 156 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:20:32.188147Z node 156 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 1, size# 34 2025-09-25T16:20:32.189366Z node 156 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409547 Status: 0 Info: { DataETag: 15111746c55662cf8bd4f6f5fd6129b9 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 
ChecksumState: DownloadState: } } 2025-09-25T16:20:32.189389Z node 156 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: 15111746c55662cf8bd4f6f5fd6129b9 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:20:32.189400Z node 156 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:20:32.201744Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 355 RawX2: 670014900511 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.201772Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-09-25T16:20:32.201799Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 355 RawX2: 670014900511 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.201812Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 355 RawX2: 670014900511 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:20:32.201828Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.201832Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.201836Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:32.201840Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:20:32.201845Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:20:32.201886Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.209357Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 
72057594046678944 2025-09-25T16:20:32.209583Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.209597Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:32.209627Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:32.209632Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:32.209638Z node 156 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:32.209642Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:32.209648Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:32.209760Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [156:466:2427] message: TxId: 1003 2025-09-25T16:20:32.209801Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:32.209811Z node 156 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:32.209818Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:32.209872Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:32.217506Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:32.217534Z node 156 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [156:510:2470] TestWaitNotification: OK eventTxId 1003 >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Zstd] >> TestShred::SimpleTestForTopic >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> TestShred::ShredWithSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> TestShred::Run3CyclesForTables >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 |82.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> TestShred::ShredManualLaunch >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 >> TestShred::SimpleTestForTables [GOOD] >> TestShred::ShredWithCopyTable >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:32.229682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:32.229715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.229721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:32.229727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:32.229734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:32.229740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:32.229751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.229766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:32.229892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:32.229962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:32.243298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:32.243330Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:32.248637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:32.248766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:32.248817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:32.254392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:32.254506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:32.254655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.254784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:32.255769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.255837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:32.256310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.256330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.256360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:32.256371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.256378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:32.256430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.261573Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:32.287632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:32.287758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.287854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:32.287864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:32.287960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:32.287977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:32.289410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.289486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:32.289552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.289566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:32.289573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:32.289579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:32.290132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.290143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:32.290148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:32.290442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.290450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.290455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState 
leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.290461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:32.291023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:32.291444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:32.291503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:32.291717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.291743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:32.291750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.291852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:32.291859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.291906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:32.291916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:32.292304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.292312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
6710758 2025-09-25T16:20:34.286104Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-09-25T16:20:34.286111Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-09-25T16:20:34.286128Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-09-25T16:20:34.287057Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287073Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-09-25T16:20:34.287084Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-09-25T16:20:34.287313Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-09-25T16:20:34.287393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000008 2025-09-25T16:20:34.287667Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287703Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 17179871345 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287713Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000008, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.287766Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-09-25T16:20:34.287772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:20:34.287778Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 
2025-09-25T16:20:34.287782Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:20:34.287794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-09-25T16:20:34.287807Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-09-25T16:20:34.287815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-09-25T16:20:34.287824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-09-25T16:20:34.287830Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-09-25T16:20:34.287835Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710758:0 2025-09-25T16:20:34.287850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-09-25T16:20:34.287861Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-09-25T16:20:34.287867Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 17 2025-09-25T16:20:34.287872Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-09-25T16:20:34.288058Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.288505Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.288518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:34.288577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-09-25T16:20:34.288607Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.288613Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2213], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-09-25T16:20:34.288619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2213], at schemeshard: 72057594046678944, txId: 281474976710758, 
path id: 7 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-09-25T16:20:34.288857Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 17 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.288880Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 17 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.288886Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-09-25T16:20:34.288893Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 17 2025-09-25T16:20:34.288902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2025-09-25T16:20:34.289053Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.289065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.289073Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-09-25T16:20:34.289078Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-09-25T16:20:34.289083Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-09-25T16:20:34.289097Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-09-25T16:20:34.289104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:129:2154] 2025-09-25T16:20:34.289305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:20:34.289316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-09-25T16:20:34.289336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason 
remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-09-25T16:20:34.289930Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.290235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-09-25T16:20:34.290268Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:20:34.290283Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-09-25T16:20:34.290299Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710758 2025-09-25T16:20:34.290809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-09-25T16:20:34.290823Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1284:3112] TestWaitNotification: OK eventTxId 106 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] >> ColumnBuildTest::BaseCase [GOOD] >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:37.492723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:37.492746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.492751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:37.492756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:37.492763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:37.492767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:37.492776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:37.492789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:37.492967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:37.493036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:37.517142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:37.517172Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:37.517274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.521524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:37.521622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:37.521659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:37.523073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:37.523139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:37.523244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.523473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:37.524434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:37.524482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:37.524729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:37.524738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-09-25T16:19:37.524756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:37.524764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:37.524770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:37.524805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:37.526256Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:37.550490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:37.550569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.550639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:37.550648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:37.550700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:37.550716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:37.551504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.551564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:37.551619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.551630Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:37.551636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:37.551641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:37.552170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.552184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:37.552189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:37.552648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.552660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:37.552666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:37.552673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:37.553350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:37.553860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:37.553924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:37.554141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:37.554167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
ercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:34.145157Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1008:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:34.145200Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1009:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:34.145218Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:959:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-09-25T16:20:34.145243Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:959:2807] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-09-25T16:20:34.145272Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1008:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817234124138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817234124138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:34.145319Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1009:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1758817234124138 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:34.149835Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1008:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-09-25T16:20:34.149877Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1009:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:20:34.149901Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:959:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-09-25T16:20:34.149924Z node 46 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][46:959:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-09-25T16:20:34.296647Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:34.296777Z node 46 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 146us result status StatusSuccess 2025-09-25T16:20:34.297060Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:32.160766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:32.160790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.160795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:32.160799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:32.160804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:32.160807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:32.160815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.160853Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:32.160987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:32.161073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:32.174485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:32.174509Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:32.180135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:32.180197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:32.180235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:32.182204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:32.182287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:32.182430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.182510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:32.183100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.183154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:32.183540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.183576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.183610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:32.183620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.183628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:32.183674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.185391Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:32.208789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:32.208958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.209044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:32.209053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:32.209123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:32.209139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:32.211464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.211525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:32.211623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.211637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:32.211643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:32.211650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:32.212272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.212284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:32.212291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:32.212624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.212633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:20:32.212641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.212650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:32.213473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:32.213933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:32.213978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:32.214203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.214231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:32.214238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.214309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:32.214315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.214352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:32.214361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:32.214835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.214848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
RD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:34.283226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:34.283384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [1:1955:3632], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1956:3633] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-09-25T16:20:34.283395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:20:34.283402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-09-25T16:20:34.283448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-09-25T16:20:34.283454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:34.283459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:34.283472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:34.283481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:34.283495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:34.283507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:34.705295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:829:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:34.705376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:463:2414], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.705380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.705397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:829:2716], Recipient [1:829:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.705401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.705412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.705416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:34.759236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:34.759267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:34.759276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:34.759338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-09-25T16:20:34.759344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:34.759347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:34.759362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:34.759373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:34.759385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:34.759396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:35.143032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:829:2716]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143073Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143080Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:35.143090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.143094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.143106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:463:2414], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.143109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.143116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:829:2716], Recipient [1:829:2716]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.143119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:35.193905Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:35.193936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:35.193942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:35.194008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:35.194014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:35.194020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:35.194041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:35.194047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:35.194061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.932000s, Timestamp# 1970-01-01T00:00:05.113000Z 2025-09-25T16:20:35.194067Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-09-25T16:20:35.194957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:35.195112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:1979:3656], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:35.195122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:35.195128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:35.195157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:280:2270], Recipient [1:297:2281]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:35.195163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:35.195168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:20:32.501477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:32.501512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.501518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:32.501524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:32.501530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:32.501534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:32.501545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.501558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:32.501697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:32.501767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:32.520054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:32.520080Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:32.524999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:32.525163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:32.525236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:32.539268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:32.539397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:32.539576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.539662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:32.540305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.540351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:32.540679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.540690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.540721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:32.540730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.540737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:32.540768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.542479Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:20:32.567837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:32.567960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.568030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:32.568039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:32.568115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:32.568132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:32.569286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.569338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:32.569404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.569415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:32.569422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:32.569429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:32.570105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.570120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:32.570126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:32.572175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.572193Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.572203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.572213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:32.573144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:32.574323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:32.574394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:32.574657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.574695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:32.574703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.574794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:32.574803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.574841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:32.574854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:32.576937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.576961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-09-25T16:20:35.421512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-09-25T16:20:35.421532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725761:0 progress is 1/1 2025-09-25T16:20:35.421538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-09-25T16:20:35.421544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725761:0 progress is 1/1 2025-09-25T16:20:35.421547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-09-25T16:20:35.421553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-09-25T16:20:35.421570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:574:2513] message: TxId: 281474976725761 2025-09-25T16:20:35.421578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-09-25T16:20:35.421584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976725761:0 2025-09-25T16:20:35.421589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976725761:0 2025-09-25T16:20:35.421604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-09-25T16:20:35.422332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-09-25T16:20:35.422349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976725761 2025-09-25T16:20:35.422367Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2417: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2025-09-25T16:20:35.422404Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2420: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1149:3023], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 
UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0}, txId# 281474976725761 2025-09-25T16:20:35.422875Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-09-25T16:20:35.422908Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1149:3023], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0} 2025-09-25T16:20:35.422919Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-09-25T16:20:35.423373Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1478: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done 2025-09-25T16:20:35.423400Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1479: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1149:3023], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0} 2025-09-25T16:20:35.423408Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:338: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-09-25T16:20:35.423437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 
2025-09-25T16:20:35.423444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1167:3041] TestWaitNotification: OK eventTxId 106 2025-09-25T16:20:35.423918Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-09-25T16:20:35.424037Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-09-25T16:20:35.424339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-09-25T16:20:35.424408Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 76us result status StatusSuccess 2025-09-25T16:20:35.424550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 
Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> ReadOnlyVDisk::TestWrites >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> TestShred::SimpleTestForTopic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> TRestoreWithRebootsTests::CancelShouldSucceed[Zstd] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> TestShred::ManualLaunch3Cycles [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC >> TestShred::ShredManualLaunch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:34.577966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:34.577987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.577991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:34.577995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:34.577999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:34.578002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:34.578008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.578018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:34.578109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:34.578171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:34.593307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:34.593332Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:34.597320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:34.597432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:34.597467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-09-25T16:20:34.599262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:34.599329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:34.599487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.599558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:34.600081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.600125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:34.600429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.600441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.600462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:34.600470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:34.600477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:34.600514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.602095Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:34.628555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:34.628677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.628749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:34.628760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:34.628854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:34.628875Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:34.629996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.630056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:34.630125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.630138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:34.630145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:34.630151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:34.630742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.630755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:34.630761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:34.631205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.631218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.631226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.631235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:34.632165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:34.632667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:34.632737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 
at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:34.633015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.633055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:34.633065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.633157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:34.633168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.633206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:34.633221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:34.634013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.634029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
RD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:36.162918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:36.163022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [1:1318:3133], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1319:3134] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-09-25T16:20:36.163032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:20:36.163039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-09-25T16:20:36.163074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-09-25T16:20:36.163079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:36.163083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:36.163092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:36.163099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:36.163109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:36.163118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:36.579515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579584Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.579599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:463:2414], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.579604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.579621Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:885:2763], Recipient [1:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.579625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.579634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.579638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.600030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:36.600066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:36.600076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:36.600150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-09-25T16:20:36.600157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:36.600162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:36.600183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:36.600198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:36.600216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:36.600224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:36.999948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.999977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.999993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:885:2763]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:36.999998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.000009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.000014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.000026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:463:2414], Recipient [1:463:2414]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.000032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.000050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:885:2763], Recipient [1:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.000055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.000066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.000070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.023335Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.023373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.023382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:37.023462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:37.023471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:37.023476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:37.023500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:37.023507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:37.023517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.979000s, Timestamp# 1970-01-01T00:00:05.066000Z 2025-09-25T16:20:37.023525Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-09-25T16:20:37.024612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:37.024781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:1342:3157], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.024789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.024794Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:37.024839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:280:2270], Recipient [1:297:2281]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:37.024846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:37.024850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> ReadOnlyVDisk::TestWrites [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Raw] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:35.212076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:35.212105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.212111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:35.212117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:35.212123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:35.212128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:35.212137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.212151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:35.212263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:35.212329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:35.236384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:35.236405Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:35.242654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:35.242783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:35.242827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:35.244970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:35.245047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:35.245165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.245231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:35.245970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.246052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:35.246390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.246401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.246420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:35.246427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:35.246451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:35.246495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.248254Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: 
[1:15:2062] 2025-09-25T16:20:35.268078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:35.268165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.268216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:35.268223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:35.268272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:35.268283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:35.268958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.269001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:35.269048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.269058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:35.269064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:35.269069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:35.269640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.269656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:35.269662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:35.270136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.270153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.270161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.270168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:35.270893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:35.271374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:35.271427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:35.271644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.271674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:35.271682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.271749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:35.271755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.271784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:35.271797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:35.272301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.272313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
5Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269553241, Sender [1:640:2559], Recipient [1:463:2414]: NKikimrTxDataShard.TEvVacuumResult VacuumGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-09-25T16:20:35.879706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5440: StateWork, processing event TEvDataShard::TEvVacuumResult 2025-09-25T16:20:35.879725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:546: TTxCompleteShredShard Execute at schemestard: 72075186233409546 2025-09-25T16:20:35.879757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__tenant_shred_manager.cpp:309: [TenantShredManager] [Finished] Shred is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], tabletId# 72075186233409550, shardIdx# 72075186233409546:5 in# 105 ms, next wakeup in# 14.895000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-09-25T16:20:35.879777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:326: [TenantShredManager] Shred in shards is completed. Send response to root schemeshard 2025-09-25T16:20:35.879782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:349: [TenantShredManager] Complete: Generation# 1 2025-09-25T16:20:35.880308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-09-25T16:20:35.880429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [1:2331:3946], Recipient [1:463:2414]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:2332:3947] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-09-25T16:20:35.880447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:20:35.880454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-09-25T16:20:35.880466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:2332:3947], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:35.880472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:35.880477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:35.880508Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125514, Sender [1:463:2414], Recipient [1:297:2281]: NKikimrScheme.TEvTenantShredResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-09-25T16:20:35.880517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5443: StateWork, processing event TEvSchemeShard::TEvTenantShredResponse 2025-09-25T16:20:35.880549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2025-09-25T16:20:35.880567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 106 ms, next wakeup# 599.894000s, rate# 0, in 
queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-09-25T16:20:35.880585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2025-09-25T16:20:35.881063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:35.881075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:35.881167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [1:2336:3951], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2337:3952] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-09-25T16:20:35.881173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:20:35.881178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-09-25T16:20:35.881206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-09-25T16:20:35.881210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:35.881214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:35.881221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:35.881226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:35.881235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:35.881243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:36.832816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:36.832859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:36.832868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:36.832909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.832920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:36.832991Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-09-25T16:20:36.833003Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:36.833008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:36.833034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:36.833049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:36.833068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:36.833078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:37.355745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.355776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.355807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.355815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.355823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:37.355875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:297:2281], Recipient [1:297:2281]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.355882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.355938Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:302:2284], Recipient [1:297:2281]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:37.355946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:37.355951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:37.355976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:37.355982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:37.355989Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-09-25T16:20:37.357287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:37.357471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:2386:4001], Recipient [1:297:2281]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.357479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.357484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:37.357516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:280:2270], Recipient [1:297:2281]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:37.357522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:37.357526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TestShred::SimpleTestForAllSupportedObjects [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 15335481338256870200 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-09-25T16:20:37.162104Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-09-25T16:20:37.164889Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-09-25T16:20:37.167645Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in 
read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:20:37.168288Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-09-25T16:20:37.170191Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-09-25T16:20:37.170762Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-09-25T16:20:37.171334Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-09-25T16:20:37.171725Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-09-25T16:20:37.325235Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:20:37.325262Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.325284Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:20:37.325494Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [d77f8a8841b25819] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-09-25T16:20:37.325878Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:20:37.325908Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:20:37.326128Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# 
[1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-09-25T16:20:37.326410Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:20:37.326530Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:20:37.326638Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-09-25T16:20:37.326882Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.327150Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:20:37.327239Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only m ... 
ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-09-25T16:20:37.676934Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.676967Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-09-25T16:20:37.677790Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:20:37.678079Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-09-25T16:20:37.678753Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-09-25T16:20:37.679265Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.679275Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-09-25T16:20:37.679656Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.679667Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-09-25T16:20:37.680151Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.680161Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-09-25T16:20:37.680550Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:20:37.680597Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-09-25T16:20:37.681151Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.681165Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-09-25T16:20:37.681583Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:20:37.681603Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:20:34.097747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:34.097777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.097784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:34.097790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:34.097797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:34.097802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:34.097813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.097829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:34.097957Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:34.098035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:34.116000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:34.116030Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:34.120645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:34.120682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:34.120716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:34.122081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:34.122149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:34.122277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.122376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:34.123379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.123429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:34.123801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.123814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.123849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:34.123858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:34.123865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:34.123892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.129172Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:20:34.152266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:20:34.152387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.152469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:34.152478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:34.152552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:34.152571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:34.153386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.153464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:34.153546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.153559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:34.153567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:34.153573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:34.154059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.154073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:34.154082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:34.154469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.154483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.154490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.154500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:34.155241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:34.155672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:34.155723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:34.155985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.156013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:34.156021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.156106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:34.156114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.156163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:34.156178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:34.156644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.156654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
RD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:36.684199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:36.684294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877760, Sender [1:2383:3997], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2384:3998] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-09-25T16:20:36.684301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5320: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-09-25T16:20:36.684306Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6092: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-09-25T16:20:36.684334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-09-25T16:20:36.684339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:36.684343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:36.684351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:36.684356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:36.684369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:36.684378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:37.264153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:959:2820]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264241Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.264257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:461:2412], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.264262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.264280Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:959:2820], Recipient [1:959:2820]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.264284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.264295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.264299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.315236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.315263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.315271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:37.315376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-09-25T16:20:37.315384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:37.315390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:37.315412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:37.315425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:37.315443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:37.315453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:37.786315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786357Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:461:2412]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:959:2820]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:37.786375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:461:2412], Recipient [1:461:2412]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.786378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.786390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:959:2820], Recipient [1:959:2820]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.786393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.786399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:296:2279], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.786402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:37.837190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:296:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.837225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:37.837233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:37.837303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:299:2281], Recipient [1:296:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:37.837309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:37.837320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:37.837340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:37.837345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:37.837356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.922000s, Timestamp# 1970-01-01T00:00:05.123000Z 2025-09-25T16:20:37.837363Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-09-25T16:20:37.838142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:37.838278Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:2403:4017], Recipient [1:296:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.838285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:37.838289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:37.838308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:279:2268], Recipient [1:296:2279]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:37.838313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:37.838316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TestShred::ShredWithMerge [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> TColumnShardTestReadWrite::WriteOverload+InStore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter [GOOD] >> TestShred::Run3CyclesForTopics ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 184 112 28 48 32 24 16 24 56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> TColumnShardTestReadWrite::ReadWithProgramNoProjection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:76:2058] recipient: [1:61:2103] 2025-09-25T16:20:33.423402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-09-25T16:20:33.423430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:33.423436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:33.423442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:33.423449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:33.423454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:33.423464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:33.423480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:33.423623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:33.423712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:33.441229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:33.441265Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:33.443189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:33.443281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:33.443323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:33.444582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:33.444732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:33.444880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:33.444962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:33.445179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:33.445218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:33.445522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:33.445532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:33.445589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:33.445598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:33.445604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:33.445623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.446168Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:152:2058] recipient: [1:16:2063] 2025-09-25T16:20:33.470783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:33.470911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.470997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:33.471006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:33.471102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:33.471121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:33.471403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:33.471526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:33.471544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:33.471550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:33.471653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:33.471734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:33.471747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:33.471755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:33.472447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:33.472535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:33.472596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:33.472875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:33.472904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 77 RawX2: 4294969407 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:33.472913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:33.473012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:33.473020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:33.473055Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:33.473069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:33.473196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:33.473205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Des ... :TEvMeasureSelfResponseTime 2025-09-25T16:20:38.220292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.220320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.220338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.220343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.251311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.251348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.251380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.251386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.261572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.261605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.261626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.261632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.292552Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.292601Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.292623Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.292629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.302847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.302886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.302910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.302915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.333980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.334027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.334049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.334055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.344243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.344273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.344291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.344296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.375223Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.375257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:38.375335Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 
2025-09-25T16:20:38.375341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:38.385696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269553162, Sender [1:1205:3023], Recipient [1:281:2243]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 1 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 831 Memory: 90253 Storage: 10149823 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-09-25T16:20:38.385728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5262: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-09-25T16:20:38.385752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:561: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.0831 2025-09-25T16:20:38.385774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:568: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-09-25T16:20:38.385787Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:608: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-09-25T16:20:38.395992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:38.396028Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:38.396036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:38.396145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:189:2182], Recipient [1:186:2180]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:38.396154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event 
TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:38.396159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:38.396181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:38.396187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:38.396199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.998000s, Timestamp# 1970-01-01T00:01:10.002000Z 2025-09-25T16:20:38.396207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2025-09-25T16:20:38.396387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:38.397081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:1519:3279], Recipient [1:186:2180]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:38.397092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:38.397097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:38.397117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:173:2173], Recipient [1:186:2180]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:38.397123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:38.397128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::CancelShouldSucceed[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for 
TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:43.295035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:43.295056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.295061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:43.295065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:43.295070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:43.295073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:43.295080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.295092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:43.295206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:43.295274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:43.310185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:43.310208Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:43.310287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.314131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:43.314233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:43.314271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:43.315518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:43.315576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:43.315651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.315803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:43.316610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.316648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:43.316864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.316873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.316886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:43.316891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:43.316896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:43.316919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:43.317999Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.332640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:43.332694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.332736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.332742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.332778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.332807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.333385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.333418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.333462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.333469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.333473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.333477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.333888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.333899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.333903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.334242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.334249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.334253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.334257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:43.334690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.335036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.335083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: 
[1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.335303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.335325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 57594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:37.220638Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:7099 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 192E2169-EA92-4488-8E34-E18FC7512861 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:20:37.221139Z node 174 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-09-25T16:20:37.221345Z node 174 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:37.221403Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.221410Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 1003:0 ProgressState at tablet72057594046678944 2025-09-25T16:20:37.221415Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.221766Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:37.222262Z node 174 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:37.222274Z node 174 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:37.222287Z node 174 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 0-22 2025-09-25T16:20:37.222310Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to 
tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:7099 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 367E4DEF-3F66-4C6F-90C8-240B519F5473 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:20:37.225915Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6700: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 204 } } 2025-09-25T16:20:37.225931Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:37.225951Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 204 } } 2025-09-25T16:20:37.225962Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 204 } } 2025-09-25T16:20:37.226109Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 747324311829 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.226114Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:37.226126Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 747324311829 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.226139Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 339 RawX2: 747324311829 } Origin: 
72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.226154Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226159Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226165Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226176Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 133 -> 240 2025-09-25T16:20:37.226205Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:37.226811Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226932Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226964Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.226972Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:37.226988Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:37.226992Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.226998Z node 174 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:37.227001Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.227006Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:37.227012Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.227017Z node 174 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:37.227023Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:37.227054Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestWaitNotification wait txId: 1003 2025-09-25T16:20:37.227683Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:37.227693Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-09-25T16:20:37.227710Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-09-25T16:20:37.227715Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-09-25T16:20:37.227777Z node 174 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:37.227793Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:37.227797Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [174:488:2459] 2025-09-25T16:20:37.227812Z node 174 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-09-25T16:20:37.227821Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-09-25T16:20:37.227824Z node 174 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [174:488:2459] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 >> Normalizers::SchemaVersionsNormalizer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> TRestoreWithRebootsTests::ShouldSucceedOnMultipleFrames [GOOD] >> TColumnShardTestReadWrite::CompactionGCFailingBs >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Raw] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:42.659663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:42.659686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.659692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:42.659698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:42.659704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:42.659709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:42.659718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:42.659731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:42.659853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:42.659904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:42.677675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:42.677697Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:42.677787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.681363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:42.681432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:42.681462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:42.682895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:42.682952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:42.683025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.683224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:42.684414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:42.684668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:42.684699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:42.684707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:42.684713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:42.684746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:42.685940Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:42.700674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:42.700732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.700780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:42.700787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:42.700849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:42.700884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:42.701527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:42.701562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:42.701614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.701622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:42.701627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:42.701630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:42.702029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.702038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:42.702041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:42.702377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.702387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:42.702393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:42.702399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:42.703018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:42.703368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:42.703409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:42.703581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-09-25T16:19:42.703599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... -0251CF2C3A92 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-09-25T16:20:37.694573Z node 163 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: a3ed28bfb53c9214f635c51ed6b618c4 ContentLength: 14 } } 2025-09-25T16:20:37.694622Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:37.694630Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:20:37.694717Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:37.694725Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [163:213:2214], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-09-25T16:20:37.694854Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.694865Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:37.695062Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:37.695076Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2025-09-25T16:20:37.695081Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-09-25T16:20:37.695087Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-09-25T16:20:37.695094Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:20:37.695113Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:20:37.695247Z node 163 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle 
NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 1003 2025-09-25T16:20:37.696132Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:37.696181Z node 163 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:37.696190Z node 163 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:37.696205Z node 163 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv, range# 0-13 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-09-25T16:20:37.696299Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:37.696308Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-09-25T16:20:37.696389Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:37.696395Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:20:37.696401Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1003, at schemeshard: 72057594046678944 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15742 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E1180921-140E-47B6-BBF7-1F5CC115F6B2 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-09-25T16:20:37.696875Z node 163 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a3ed28bfb53c9214f635c51ed6b618c4 Body: 14b } 2025-09-25T16:20:37.696888Z node 163 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 14, body-size# 14 2025-09-25T16:20:37.696910Z node 163 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 0, error# Value parse error: '"a1"' m is expected. 
on line: "a1","value1", writtenBytes# 0, writtenRows# 0 2025-09-25T16:20:37.696923Z node 163 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 0, size# 8 2025-09-25T16:20:37.699891Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 700079671573 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.699903Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:37.699920Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 700079671573 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.699930Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 339 RawX2: 700079671573 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:20:37.699940Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:37.699943Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.699947Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:37.699952Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:20:37.699986Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:37.700347Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.700373Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:37.700382Z node 163 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:37.700393Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:37.700397Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.700400Z node 163 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:37.700402Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.700406Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:37.700414Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [163:417:2389] message: TxId: 1003 2025-09-25T16:20:37.700419Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:37.700423Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:37.700427Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:37.700449Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:37.701265Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:37.701281Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [163:457:2428] TestWaitNotification: OK eventTxId 1003 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> TColumnShardTestReadWrite::ReadWithProgramLike >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> TestShred::ShredWithSplit [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> TSchemeShardViewTest::EmptyName >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> TImportTests::AuditCancelledImport [GOOD] >> 
TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::EmptyName [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:76:2058] recipient: [1:61:2103] 2025-09-25T16:20:34.889944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:34.889969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.889976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:34.889981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:34.889987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:34.889992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:34.890001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:34.890015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:34.890134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:34.890220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:34.903842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:34.903871Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:34.905310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:34.905368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:34.905399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-09-25T16:20:34.906477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:34.906569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:34.906663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.906733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:34.906888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.906917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:34.907189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.907198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:34.907237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:34.907244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:34.907249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:34.907263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.907656Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:152:2058] recipient: [1:16:2063] 2025-09-25T16:20:34.929486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:34.929600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.929679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:34.929690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:34.929824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:34.929845Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:34.930099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:34.930224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:34.930243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:34.930249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:34.930333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:34.930409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:34.930422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.930430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:34.931178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:34.931260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:34.931320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 
at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:34.931588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:34.931618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 77 RawX2: 4294969407 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:34.931629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.931735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:34.931745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:34.931784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:34.931799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:34.931945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:34.931951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Des ... 
eShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.619168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.650126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.650159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.650196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.650202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.660440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.660468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.660486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.660489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.691929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.691964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.691999Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.692004Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.702250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.702286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.702311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.702315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.733491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received 
event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.733519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:39.733539Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.733545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:39.744058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269553162, Sender [1:1000:2868], Recipient [1:281:2243]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 2 Generation: 2 Round: 1 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 402 Memory: 89029 Storage: 5024539 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-09-25T16:20:39.744090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5262: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-09-25T16:20:39.744109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:561: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.0402 2025-09-25T16:20:39.744136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:568: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-09-25T16:20:39.744146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:608: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-09-25T16:20:39.744203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269553162, Sender [1:1003:2870], Recipient [1:281:2243]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 1 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 422 Memory: 89053 Storage: 5127032 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-09-25T16:20:39.744213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5262: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-09-25T16:20:39.744219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:561: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.0422 2025-09-25T16:20:39.744238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:568: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-09-25T16:20:39.754673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:39.754707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:39.754715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:39.754807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:189:2182], Recipient [1:186:2180]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:39.754815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:39.754820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:39.754843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:39.754848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:39.754860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2025-09-25T16:20:39.754867Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2025-09-25T16:20:39.755076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:39.755801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:1473:3239], Recipient [1:186:2180]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:39.755811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:39.755816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:39.755851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:173:2173], Recipient [1:186:2180]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:39.755857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:39.755862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2025-09-25T16:20:39.097347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:39.102824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:39.102888Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:39.103818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:39.103884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:39.103931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:39.103953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:39.103972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:39.104003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:39.104068Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:39.104092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:39.104114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:39.104134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.104154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:39.104174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:39.104220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:39.110779Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:39.110856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:39.110868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:39.110916Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.110953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:39.110966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:39.110972Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:39.110984Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:39.110995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:39.111003Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:39.111008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:39.111029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.111039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:39.111048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:39.111053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:39.111065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:39.111073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:39.111082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:39.111088Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:39.111098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:39.111106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:39.111111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:39.111122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:39.111130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:39.111136Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:39.111165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:39.111175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:39.111181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:39.111197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:39.111206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.111211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.111220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:39.111228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:39.111233Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:39.111241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:39.111250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:39.111255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:39.111270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:39.111279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=100; 2025-09-25T16:20:40.137367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=100;batch_columns=level,timestamp; 2025-09-25T16:20:40.137428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:273:2285];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2025-09-25T16:20:40.137446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:40.137516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:281:2293] finished for tablet 9437184 2025-09-25T16:20:40.137620Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:273:2285];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1166331,"name":"_full_task","f":1166331,"d_finished":0,"c":0,"l":1169187,"d":2856},"events":[{"name":"bootstrap","f":1166410,"d_finished":416,"c":1,"l":1166826,"d":416},{"a":1169141,"name":"ack","f":1168947,"d_finished":178,"c":1,"l":1169125,"d":224},{"a":1169139,"name":"processing","f":1166886,"d_finished":642,"c":3,"l":1169125,"d":690},{"name":"ProduceResults","f":1166712,"d_finished":310,"c":6,"l":1169175,"d":310},{"a":1169175,"name":"Finish","f":1169175,"d_finished":0,"c":0,"l":1169187,"d":12},{"name":"task_result","f":1166891,"d_finished":452,"c":2,"l":1168922,"d":452}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137642Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:273:2285];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:40.137695Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:273:2285];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1166331,"name":"_full_task","f":1166331,"d_finished":0,"c":0,"l":1169289,"d":2958},"events":[{"name":"bootstrap","f":1166410,"d_finished":416,"c":1,"l":1166826,"d":416},{"a":1169141,"name":"ack","f":1168947,"d_finished":178,"c":1,"l":1169125,"d":326},{"a":1169139,"name":"processing","f":1166886,"d_finished":642,"c":3,"l":1169125,"d":792},{"name":"ProduceResults","f":1166712,"d_finished":310,"c":6,"l":1169175,"d":310},{"a":1169175,"name":"Finish","f":1169175,"d_finished":0,"c":0,"l":1169289,"d":114},{"name":"task_result","f":1166891,"d_finished":452,"c":2,"l":1168922,"d":452}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.137708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:40.134445Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-09-25T16:20:40.137713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:40.137736Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:281:2293];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2025-09-25T16:20:40.137849Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-09-25T16:20:40.137908Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1758817240101:100} readable: {1758817240101:max} at tablet 9437184 2025-09-25T16:20:40.137939Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 
2025-09-25T16:20:40.138002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1758817240101:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-09-25T16:20:40.138017Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1758817240101:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-09-25T16:20:40.138035Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1758817240101:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:11;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TestShred::Run3CyclesForTables [GOOD] >> TestShred::Run3CyclesForAllSupportedObjects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:40.139217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:40.139238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:40.139242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:40.139246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:40.139251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:40.139254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:40.139260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:40.139270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-09-25T16:20:40.139355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:40.139412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:40.152449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:40.152472Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:40.157137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:40.157232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:40.157277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:40.159349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:40.159455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:40.159617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.159698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:40.160417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.160487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:40.160799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:40.160808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.160844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:40.160855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:40.160862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:40.160903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.162263Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:40.180314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:40.180409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.180499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:40.180510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:40.180577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:40.180590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:40.181563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.181628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:40.181703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.181716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:40.181722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:40.181728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:40.182297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.182311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:40.182320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:40.182707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.182718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.182724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.182731Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:40.183511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:40.183988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:40.184033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:40.184270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.184298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:40.184307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.184390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:40.184399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.184435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:40.184450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:40.184960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:40.184971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:40.185023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.185033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:20:40.185132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.185141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:20:40.185156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:20:40.185161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:20:40.185166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:20:40.185170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:20:40.185174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:20:40.185180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:20:40.185185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:20:40.185190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:20:40.185203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:40.185210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:20:40.185215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-09-25T16:20:40.185572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:20:40.185588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:20:40.185594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:20:40.185601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-09-25T16:20:40.185606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:40.185621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:20:40.186353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:20:40.186464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:20:40.186625Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:275:2265] Bootstrap 2025-09-25T16:20:40.186859Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:20:40.187729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:40.187774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-09-25T16:20:40.187782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-09-25T16:20:40.187804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, at schemeshard: 72057594046678944 2025-09-25T16:20:40.188020Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:40.188779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:40.188848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, operation: CREATE VIEW, path: /MyRoot/ 2025-09-25T16:20:40.188912Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] Test command err: 2025-09-25T16:20:38.746434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:38.751619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:38.751679Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:38.752491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:38.752554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:38.752596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:38.752617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:38.752636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:38.752660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:38.752681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:38.752701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:38.752721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:38.752740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:38.752760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:38.752778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:38.752845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:38.759807Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:38.759889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:38.759900Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:38.759946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:38.759985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:38.759999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:38.760006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:38.760017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:38.760027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:38.760035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:38.760040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:38.760061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:38.760071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:38.760080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:38.760084Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:38.760096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:38.760104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:38.760113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:38.760118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:38.760129Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:38.760137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:38.760142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:38.760152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:38.760160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:38.760165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:38.760194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:38.760203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:38.760207Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:38.760222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:38.760231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:38.760235Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:38.760244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:38.760252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:38.760256Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:38.760265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:38.760273Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:38.760278Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:38.760293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:38.760303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 37184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=190;count=2;size_of_meta=112; 2025-09-25T16:20:40.012415Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=262;count=1;size_of_portion=184; 2025-09-25T16:20:40.012613Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-09-25T16:20:40.012650Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=2;operation_id=1; 2025-09-25T16:20:40.023510Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-09-25T16:20:40.023710Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=936c0db8-9a2b11f0-90b359cb-394e1a6e; 2025-09-25T16:20:40.023770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2025-09-25T16:20:40.023784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2025-09-25T16:20:40.023797Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=524;count=2;size_of_portion=184; 2025-09-25T16:20:40.023943Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-09-25T16:20:40.023967Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=3;operation_id=2; 2025-09-25T16:20:40.034845Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-09-25T16:20:40.035030Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=937c97b4-9a2b11f0-9653d985-e9a73683; 2025-09-25T16:20:40.035080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2025-09-25T16:20:40.035095Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2025-09-25T16:20:40.035106Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=786;count=3;size_of_portion=184; 2025-09-25T16:20:40.035225Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-09-25T16:20:40.035250Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=4;operation_id=3; 2025-09-25T16:20:40.046022Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-09-25T16:20:40.046237Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=938be494-9a2b11f0-a78db86a-9f85a76f; 2025-09-25T16:20:40.046311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2025-09-25T16:20:40.046334Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2025-09-25T16:20:40.046363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=1048;count=4;size_of_portion=184; 2025-09-25T16:20:40.046516Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-09-25T16:20:40.046549Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=5;operation_id=4; 2025-09-25T16:20:40.057411Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-09-25T16:20:40.058475Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=5;last=5; 2025-09-25T16:20:40.058497Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:18;writing_size=6330728;operation_id=93b0a7a2-9a2b11f0-abbb9263-994372cb;in_flight=1;size_in_flight=6330728; 2025-09-25T16:20:40.151497Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:129:2160];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-09-25T16:20:40.171738Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=93b0a7a2-9a2b11f0-abbb9263-994372cb; 2025-09-25T16:20:40.171842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2025-09-25T16:20:40.171864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2025-09-25T16:20:40.171879Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=1310;count=5;size_of_portion=184; 2025-09-25T16:20:40.172132Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-09-25T16:20:40.172165Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=6;operation_id=5; 2025-09-25T16:20:40.185232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::AuditCancelledImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:19:43.224588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:43.224608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.224614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:43.224618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:43.224623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:43.224628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:43.224637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:43.224651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:43.224767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:43.224848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:43.237996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:19:43.238016Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:43.241314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:43.241376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:43.241406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:43.242999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:43.243050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:43.243130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.243167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:43.243565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.243597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:43.243808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.243815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:43.243828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:43.243834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-09-25T16:19:43.243838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:43.243861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.244922Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:19:43.265348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:43.265421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.265478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:43.265487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:43.265575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:43.265591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:43.266346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.266393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:43.266446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.266455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:43.266462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:43.266467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:43.266892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.266902Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:43.266908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:43.267268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.267277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:43.267282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.267289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:43.268047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:43.268495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:43.268542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:43.268689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:43.268713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:19:43.268718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.268787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:19:43.268793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:43.268814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:19:43.268843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2025-09-25T16:19:43.269250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:43.269256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.021874Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2025-09-25T16:20:40.021886Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710757:0 progress is 1/1 2025-09-25T16:20:40.021888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-09-25T16:20:40.021892Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710757:0 progress is 1/1 2025-09-25T16:20:40.021894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-09-25T16:20:40.021897Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 1/1, is published: true 2025-09-25T16:20:40.021923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:127:2152] message: TxId: 281474976710757 2025-09-25T16:20:40.021928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 1/1 2025-09-25T16:20:40.021932Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710757:0 2025-09-25T16:20:40.021935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976710757:0 2025-09-25T16:20:40.021966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:20:40.022344Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7171: Handle: TEvNotifyTxCompletionResult: txId# 281474976710757 2025-09-25T16:20:40.022361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7173: Message: TxId: 281474976710757 2025-09-25T16:20:40.022804Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 101, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2025-09-25T16:20:40.034497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:40.034513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:20:40.035385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRestore Internal: true Restore { TableName: "Table" TableDescription { Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" } NumberOfRetries: 0 S3Settings { Endpoint: 
"localhost:2570" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" UseVirtualAddressing: true } } } TxId: 281474976710758 TabletId: 72057594046678944 Owner: "user@builtin" PeerName: "127.0.0.1:9876" UserSID: "user@builtin" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-09-25T16:20:40.035428Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TRestore Propose, path: /MyRoot/Table, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.035451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:40.035457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 281474976710758:0 type: TxRestore target path: [OwnerId: 72057594046678944, LocalPathId: 2] source path: 2025-09-25T16:20:40.035544Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:40.035562Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRestore, opId: 281474976710758:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:563) 2025-09-25T16:20:40.035790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:40.035798Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:40.036077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:40.036132Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710758, database: /MyRoot, subject: user@builtin, status: StatusAccepted, operation: RESTORE TABLE, path: /MyRoot/Table 2025-09-25T16:20:40.036183Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7120: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2025-09-25T16:20:40.036190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7122: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 2025-09-25T16:20:40.036252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.036263Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxRestore, at tablet# 72057594046678944 2025-09-25T16:20:40.036272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710758:0 ProgressState no shards to create, do next state 2025-09-25T16:20:40.036277Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710758:0 2 -> 3 2025-09-25T16:20:40.037075Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710758 TxId: 101 2025-09-25T16:20:40.037093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 101:0, target opId# 281474976710758:0 2025-09-25T16:20:40.037238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.037247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TRestore TConfigurePart ProgressState, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.037255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore.cpp:38: Propose restore, datashard: 72075186233409546, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.037924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.037985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.037994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TRestore TConfigurePart ProgressState, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.038002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore.cpp:38: Propose restore, datashard: 72075186233409546, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.038097Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7213: Handle: TEvCancelTxResult: Cookie: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:40.038117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7215: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710758 TxId: 101 2025-09-25T16:20:40.038259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-09-25T16:20:40.038285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 2025-09-25T16:20:40.038783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-09-25T16:20:40.038823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:40.038829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:414:2379] TestWaitNotification: OK eventTxId 101 AUDIT LOG buffer(5): 2025-09-25T16:20:10.534708Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:20:10.570825Z: component=schemeshard, id=101, uid=foo, remote_address=127.0.0.1, 
subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=IMPORT START, status=SUCCESS, detailed_status=SUCCESS, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none} 2025-09-25T16:20:10.660843Z: component=schemeshard, tx_id=281474976710757, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE WITH INDEXES, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:20:40.036126Z: component=schemeshard, tx_id=281474976710758, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=RESTORE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-09-25T16:20:40.038188Z: component=schemeshard, id=101, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=IMPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none}, start_time=2025-09-25T16:20:10.538213Z, end_time=2025-09-25T16:20:40.573713Z AUDIT LOG checked line: 2025-09-25T16:20:40.038188Z: component=schemeshard, id=101, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=IMPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, import_type=s3, import_item_count=1, import_s3_bucket={none}, import_s3_prefix={none}, start_time=2025-09-25T16:20:10.538213Z, end_time=2025-09-25T16:20:40.573713Z >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldSucceedOnMultipleFrames [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:53.669152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:53.669172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.669176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:53.669180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:53.669184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:53.669187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:53.669194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:53.669204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:53.669318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:53.669379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:53.687795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:53.687820Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:53.687887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.691248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:53.691339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:53.691374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:53.692379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:53.692452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:53.692549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.692716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:53.693907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.693955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:53.694202Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:53.694215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:53.694233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:53.694241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:53.694248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:53.694282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:53.695700Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:53.712117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:53.712168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.712206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:53.712212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:53.712253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:53.712278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:53.712814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.712870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:53.712919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.712929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:53.712934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:53.712939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:53.713368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.713378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:53.713382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:53.713794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.713808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:53.713813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:53.713819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:53.714557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:53.714970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:53.715014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:53.715233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:53.715258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
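
The trace above walks a single ESchemeOpAlterSubDomain operation through the schemeshard's per-operation sub-state machine: it logs `Change state for txid 1:0 2 -> 3`, then `3 -> 128`, and, once the coordinator plans the step, `128 -> 240`. A minimal standalone sketch of that numeric progression is shown below; the enum names are assumptions chosen to match the phases named in the log (CreateParts, ConfigureParts, Propose, Done) and are not YDB's actual TTxState definitions.

```cpp
// Illustrative sketch only: models the txid sub-state progression seen in the
// trace (2 -> 3 -> 128 -> 240). The numeric values and names are assumptions
// inferred from the log lines, not YDB's real state enum.
#include <cstdio>
#include <utility>
#include <vector>

enum ETxSubState : int {
    CreateParts    = 2,   // "TCreateParts ... ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState" (waits for the coordinator plan)
    Done           = 240, // "TDone opId# ... ProgressState"
};

int main() {
    // The order in which the schemeshard's TTxOperationProgress transactions
    // advanced operation 1:0 in the trace above.
    const std::vector<std::pair<ETxSubState, ETxSubState>> transitions = {
        {CreateParts, ConfigureParts},  // "no shards to create, do next state"
        {ConfigureParts, Propose},      // nothing to configure for a subdomain alter
        {Propose, Done},                // TEvOperationPlan arrives from coordinator 72057594046316545
    };
    for (const auto& [from, to] : transitions) {
        std::printf("Change state for txid 1:0 %d -> %d\n", from, to);
    }
    return 0;
}
```
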
261984Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 28-34 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:26103 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 10075125-B48B-47E9-9EDA-43C62599AE89 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=28-34 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:20:39.262646Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:20:39.262655Z node 135 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-09-25T16:20:39.262664Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 35-41 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:26103 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DAFDB56D-C8AE-4CC7-8E07-29D3FEFD36B5 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=35-41 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:20:39.263274Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:20:39.263291Z node 135 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-09-25T16:20:39.263320Z node 135 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 1, size# 34 2025-09-25T16:20:39.263952Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } } 2025-09-25T16:20:39.263967Z node 135 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ChecksumState: DownloadState: } 2025-09-25T16:20:39.263979Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 42-48 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:26103 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2572BDA7-FD9E-4A04-A448-5491C9334603 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=42-48 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:20:39.264576Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 
e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:20:39.264586Z node 135 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-09-25T16:20:39.264597Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 49-55 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:26103 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4D3A4BB4-CC3E-452D-98BD-90EF24652154 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=49-55 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:20:39.265163Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-09-25T16:20:39.265171Z node 135 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-09-25T16:20:39.265177Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 56-59 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:26103 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5442EB90-B90E-41E6-A769-AE5C3E42AAC5 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=56-59 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-09-25T16:20:39.265717Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 4b } 2025-09-25T16:20:39.265722Z node 135 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 37, content-length# 60, body-size# 4 2025-09-25T16:20:39.265742Z node 135 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 1, size# 34 2025-09-25T16:20:39.266250Z node 135 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } } 2025-09-25T16:20:39.266260Z node 135 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ChecksumState: DownloadState: } 2025-09-25T16:20:39.266265Z node 135 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 1, error# , writtenBytes# 24, writtenRows# 3 2025-09-25T16:20:39.269040Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 579820587286 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:20:39.269062Z node 
135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:39.269088Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 579820587286 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:20:39.269104Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 340 RawX2: 579820587286 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-09-25T16:20:39.269121Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:39.269125Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:39.269131Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:39.269138Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:20:39.269180Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:39.269619Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:39.269658Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:39.269665Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:39.269677Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:39.269681Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:39.269685Z node 135 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:39.269687Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:39.269690Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 
1003, ready parts: 1/1, is published: true 2025-09-25T16:20:39.269701Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [135:416:2388] message: TxId: 1003 2025-09-25T16:20:39.269708Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:39.269713Z node 135 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:39.269716Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:39.269738Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:39.270154Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:39.270164Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [135:458:2429] TestWaitNotification: OK eventTxId 1003 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-09-25T16:20:39.856460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:39.861956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:39.862035Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:39.862907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:39.862962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:39.862997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:39.863014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:39.863027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:39.863043Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:39.863058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:39.863071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:39.863085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:39.863097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.863111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:39.863125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:39.863162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:39.867941Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:39.867996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:39.868004Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:39.868037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.868068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:39.868078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:39.868082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:39.868090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-09-25T16:20:39.868097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:39.868102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:39.868106Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:39.868119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.868125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:39.868132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:39.868135Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:39.868144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:39.868149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:39.868155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:39.868158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:39.868165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:39.868171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:39.868175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:39.868182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:39.868190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:39.868193Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:39.868212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:39.868219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:39.868222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:39.868232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:39.868238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.868241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.868247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:39.868253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:39.868257Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:39.868262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:39.868268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:39.868272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:39.868282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:39.868288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
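
On startup the column shard's TTxUpdateSchema walks a fixed chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on), logging `normalizer_switched` before each step and `normalizer_finished` after it; in this run every step reports `0 chunks found`, so the chain completes without rewriting anything. Below is a minimal, self-contained sketch of that run-in-order pattern; the interface and the `FindChunks` hook are assumptions for illustration, not the actual NKikimr normalizer API.

```cpp
// Illustrative sketch only: a sequential "normalizer" chain as suggested by the
// normalizer_switched / normalizer_finished lines in the trace. Class and
// method names here are assumptions, not YDB's real interface.
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    // Returns how many on-disk chunks would need fixing; 0 means nothing to do.
    virtual size_t FindChunks() = 0;
};

struct TNoOpNormalizer final : INormalizer {
    std::string ClassName;
    explicit TNoOpNormalizer(std::string name) : ClassName(std::move(name)) {}
    std::string Name() const override { return ClassName; }
    size_t FindChunks() override { return 0; }  // matches "0 chunks found" in the log
};

int main() {
    // The order observed in the trace for tablet 9437184.
    std::vector<std::unique_ptr<INormalizer>> chain;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
                             "GCCountersNormalizer", "SyncPortionFromChunks",
                             "RestoreV1Chunks_V2", "RestoreV2Chunks",
                             "CleanDeprecatedSnapshot", "RestoreV0ChunksMeta",
                             "CopyBlobIdsToV2", "RestoreAppearanceSnapshot"}) {
        chain.push_back(std::make_unique<TNoOpNormalizer>(name));
    }
    for (const auto& n : chain) {
        std::printf("event=normalizer_switched;description=CLASS_NAME=%s\n", n->Name().c_str());
        std::printf("message=%zu chunks found\n", n->FindChunks());
        std::printf("event=normalizer_finished;description=CLASS_NAME=%s\n", n->Name().c_str());
    }
    return 0;
}
```
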
rce_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:40.525689Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-09-25T16:20:40.525695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:40.525701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:40.525736Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:40.525754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:40.525774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2025-09-25T16:20:40.525783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=10;batch_columns=message; 2025-09-25T16:20:40.525826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:315:2327];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-09-25T16:20:40.525842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525858Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:40.525909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.525930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:316:2328] finished for tablet 9437184 2025-09-25T16:20:40.525992Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:315:2327];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result","l_task_result"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":779533,"name":"_full_task","f":779533,"d_finished":0,"c":0,"l":780735,"d":1202},"events":[{"name":"bootstrap","f":779580,"d_finished":178,"c":1,"l":779758,"d":178},{"a":780689,"name":"ack","f":780529,"d_finished":143,"c":1,"l":780672,"d":189},{"a":780687,"name":"processing","f":779785,"d_finished":376,"c":3,"l":780673,"d":424},{"name":"ProduceResults","f":779700,"d_finished":251,"c":6,"l":780724,"d":251},{"a":780724,"name":"Finish","f":780724,"d_finished":0,"c":0,"l":780735,"d":11},{"name":"task_result","f":779787,"d_finished":224,"c":2,"l":780501,"d":224}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.526003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:315:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:40.526048Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:315:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result","l_task_result"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":779533,"name":"_full_task","f":779533,"d_finished":0,"c":0,"l":780805,"d":1272},"events":[{"name":"bootstrap","f":779580,"d_finished":178,"c":1,"l":779758,"d":178},{"a":780689,"name":"ack","f":780529,"d_finished":143,"c":1,"l":780672,"d":259},{"a":780687,"name":"processing","f":779785,"d_finished":376,"c":3,"l":780673,"d":494},{"name":"ProduceResults","f":779700,"d_finished":251,"c":6,"l":780724,"d":251},{"a":780724,"name":"Finish","f":780724,"d_finished":0,"c":0,"l":780805,"d":81},{"name":"task_result","f":779787,"d_finished":224,"c":2,"l":780501,"d":224}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:40.526062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:40.524639Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-09-25T16:20:40.526067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:40.526084Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:316:2328];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:40.406380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:40.406403Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:40.406407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:40.406412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:40.406416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:40.406419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:40.406426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:40.406436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:40.406523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:40.406572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:40.421737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:40.421765Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:40.426508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:40.426637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:40.426689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:40.428387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:40.428461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:40.428575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.428645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:40.429193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.429236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:40.429552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:40.429563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-09-25T16:20:40.429588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:40.429598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:40.429604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:40.429645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.431105Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:40.451953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:40.452059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.452132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:40.452141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:40.452216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:40.452232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:40.453189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.453274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:40.453352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.453364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:40.453370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards 
to create, do next state 2025-09-25T16:20:40.453376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:40.454045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.454065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:40.454073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:40.459495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.459522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.459532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.459543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:40.460431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:40.461142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:40.461205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:40.461464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:40.461496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:40.461506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.461593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:40.461603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:40.461646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:40.461660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:40.462319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:40.462333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... _SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:20:40.468239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:40.468244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:40.468290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:40.468306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:40.468310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-09-25T16:20:40.468317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-09-25T16:20:40.468392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:40.468397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:20:40.468409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:40.468412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:40.468416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:40.468418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:40.468421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-09-25T16:20:40.468424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:40.468428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts 
is done, operation id: 101:0 2025-09-25T16:20:40.468431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:20:40.468438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:40.468443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-09-25T16:20:40.468446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-09-25T16:20:40.468448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-09-25T16:20:40.468511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:40.468518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:40.468522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:40.468525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-09-25T16:20:40.468528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:40.468594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:40.468602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:40.468605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:40.468607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:20:40.468609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:40.468615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:20:40.469181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:20:40.469201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-09-25T16:20:40.469264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:40.469274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-09-25T16:20:40.469287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:40.469290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:20:40.469295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:20:40.469297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:20:40.469364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:40.469380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:40.469386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:40.469390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2297] 2025-09-25T16:20:40.469408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:20:40.469412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:40.469415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2297] 2025-09-25T16:20:40.469437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:20:40.469439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:307:2297] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-09-25T16:20:40.469487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:40.469512Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 29us result status StatusSuccess 2025-09-25T16:20:40.469592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TestShred::ShredWithCopyTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-09-25T16:20:39.003709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:39.010994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:39.011057Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:39.011866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:39.011919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:39.011959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:39.011978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:39.011992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:39.012005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:39.012018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:39.012032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:39.012045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:39.012058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.012071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:39.012085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:39.012121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:39.018690Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:39.018770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:39.018781Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:39.018826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.018868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:39.018882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:39.018889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:39.018900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:39.018911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:39.018919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:39.018924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:39.018945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.018955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:39.018964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:39.018969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:39.018981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:39.018988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:39.018997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:39.019002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:39.019011Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:39.019020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:39.019024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:39.019034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:39.019044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:39.019048Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:39.019076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:39.019084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:39.019089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:39.019104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:39.019112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.019117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.019125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:39.019133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:39.019138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:39.019147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:39.019156Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:39.019162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:39.019178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:39.019186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... [{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"
txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61
,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":
{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 15601, msgbus: 11813 2025-09-25T16:20:04.581679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062374906502571:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:04.581698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002542/r3tmp/tmp0Q1BJE/pdisk_1.dat 2025-09-25T16:20:04.632094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:04.645821Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15601, node 1 2025-09-25T16:20:04.677979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:04.677994Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:04.677996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:04.678059Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:04.681939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:04.681967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:04.683662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11813 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:04.696780Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062374906502579:2143] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:04.696801Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062374906503147:2437] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:04.696992Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062374906503147:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.705301Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062374906503147:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:04.707162Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062374906503147:2437] Handle TEvDescribeSchemeResult Forward to# [1:7554062374906503146:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: 
SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:04.711521Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062374906502579:2143] Handle TEvProposeTransaction 2025-09-25T16:20:04.711539Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062374906502579:2143] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:20:04.711569Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062374906502579:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7554062374906503153:2442] 2025-09-25T16:20:04.721318Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062374906503153:2442] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.721366Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062374906503153:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.721370Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062374906503153:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.721385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062374906503153:2442] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.721591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062374906503153:2442] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.721668Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062374906503153:2442] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:04.721706Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062374906503153:2442] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:20:04.721781Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062374906503153:2442] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:20:04.722101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:04.723657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062374906503153:2442] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-09-25T16:20:04.723675Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062374906503153:2442] txid# 281474976710657 SEND to# [1:7554062374906503152:2441] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-09-25T16:20:04.749866Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062374906502579:2143] Handle TEvProposeTransaction 2025-09-25T16:20:04.749882Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062374906502579:2143] TxId# 281474976710658 ProcessProposeTransaction 2025-09-25T16:20:04.749898Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062374906502579:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7554062374906503191:2476] 2025-09-25T16:20:04.750885Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062374906503191:2476] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.750915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062374906503191:2476] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.750920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062374906503191:2476] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.750943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062374906503191:2476] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.751079Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062374906503191:2476] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.751116Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062374906503191:2476] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:04.751157Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062374906503191:2476] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-09-25T16:20:04.751208Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062374906503191:2476] txid# 281474976710658 HANDLE EvClientConnected 2025-09-25T16:20:04.751366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part propos ... 
ns_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:20:40.567445Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062527162141138:2520] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-09-25T16:20:40.567462Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062527162141138:2520] txid# 281474976710660 SEND to# [59:7554062527162141135:2326] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-09-25T16:20:40.570549Z node 59 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7554062527162141135:2326], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-09-25T16:20:40.623136Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062527162140445:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.623158Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062527162140445:2144] TxId# 281474976710661 ProcessProposeTransaction 2025-09-25T16:20:40.623186Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062527162140445:2144] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7554062527162141214:2576] 2025-09-25T16:20:40.624242Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062527162141214:2576] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\317\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-09-25T16:20:40.624273Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062527162141214:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:40.624279Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062527162141214:2576] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-09-25T16:20:40.624553Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [59:7554062527162141214:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-09-25T16:20:40.624570Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062527162141214:2576] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.624604Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062527162141214:2576] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.624640Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062527162141214:2576] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.624651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062527162141214:2576] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-09-25T16:20:40.624695Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# 
[59:7554062527162141214:2576] txid# 281474976710661 HANDLE EvClientConnected 2025-09-25T16:20:40.625738Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062527162141214:2576] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:40.625788Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062527162141214:2576] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:40.625792Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062527162141214:2576] txid# 281474976710661 SEND to# [59:7554062527162141135:2326] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-09-25T16:20:40.630597Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062527162140445:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.630617Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062527162140445:2144] TxId# 281474976710662 ProcessProposeTransaction 2025-09-25T16:20:40.630638Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062527162140445:2144] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7554062527162141237:2587] 2025-09-25T16:20:40.631421Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062527162141237:2587] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53636" 2025-09-25T16:20:40.631448Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062527162141237:2587] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:40.631454Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062527162141237:2587] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.631472Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062527162141237:2587] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.631629Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062527162141237:2587] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.631666Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062527162141237:2587] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.631683Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062527162141237:2587] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-09-25T16:20:40.631736Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062527162141237:2587] txid# 281474976710662 HANDLE EvClientConnected 2025-09-25T16:20:40.635585Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062527162141237:2587] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-09-25T16:20:40.635626Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062527162141237:2587] txid# 281474976710662 SEND to# [59:7554062527162141236:2318] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-09-25T16:20:40.644869Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062527162140445:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.644896Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062527162140445:2144] TxId# 281474976710663 ProcessProposeTransaction 2025-09-25T16:20:40.644922Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062527162140445:2144] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7554062527162141269:2601] 2025-09-25T16:20:40.645904Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062527162141269:2601] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53636" 2025-09-25T16:20:40.645939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062527162141269:2601] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:40.645945Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062527162141269:2601] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.645960Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062527162141269:2601] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.646136Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062527162141269:2601] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.646160Z node 59 :TX_PROXY ERROR: schemereq.cpp:1177: Actor# [59:7554062527162141269:2601] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-09-25T16:20:40.646204Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062527162141269:2601] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-09-25T16:20:40.646215Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062527162141269:2601] txid# 281474976710663 SEND to# [59:7554062527162141268:2336] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:40.646339Z node 59 :KQP_SESSION WARN: 
kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=NTJlNjBiZWMtNDExY2M3YmMtZjM4NTM0YjctMTFhNjIxYzg=, ActorId: [59:7554062527162141254:2336], ActorState: ExecuteState, TraceId: 01k60tthj1792zj35r631mpgmp, Create QueryResponse for error on request, msg: 2025-09-25T16:20:40.646446Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062527162140445:2144] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:40.646457Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062527162140445:2144] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 10496, msgbus: 27643 2025-09-25T16:20:03.827352Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062368000164710:2144];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:03.827383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:20:03.833663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0025c6/r3tmp/tmpXa0jda/pdisk_1.dat 2025-09-25T16:20:03.894490Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10496, node 1 2025-09-25T16:20:03.931821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:03.931849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:03.933304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:03.938867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:03.938879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:03.938880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:03.938927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:03.944503Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:20:03.960003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27643 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-09-25T16:20:03.965639Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062368000164848:2118] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:03.965657Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062368000165369:2444] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:03.965762Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062368000165369:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:03.971137Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062368000165369:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:03.973174Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062368000165369:2444] Handle TEvDescribeSchemeResult Forward to# [1:7554062368000165368:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:03.981570Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062368000164848:2118] Handle TEvProposeTransaction 2025-09-25T16:20:03.981582Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062368000164848:2118] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:03.981611Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062368000164848:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062368000165375:2449] 2025-09-25T16:20:03.993678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062368000165375:2449] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:03.993724Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062368000165375:2449] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:03.993728Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062368000165375:2449] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:03.993743Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062368000165375:2449] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:03.993872Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062368000165375:2449] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:03.993898Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062368000165375:2449] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:03.993910Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062368000165375:2449] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:03.993954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062368000165375:2449] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:03.994236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:20:03.995391Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062368000165375:2449] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:03.995416Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062368000165375:2449] txid# 281474976715657 SEND to# [1:7554062368000165374:2448] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-09-25T16:20:03.999640Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062368000164848:2118] Handle TEvProposeTransaction 2025-09-25T16:20:03.999650Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062368000164848:2118] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:03.999658Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062368000164848:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062368000165415:2485] 2025-09-25T16:20:04.000293Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062368000165415:2485] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.000304Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062368000165415:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.000306Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062368000165415:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.000317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062368000165415:2485] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.000385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062368000165415:2485] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.000405Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062368000165415:2485] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:04.000413Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:75540623680001 ... 
red# true 2025-09-25T16:20:40.699441Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062528885849813:2564] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-09-25T16:20:40.699498Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062528885849813:2564] txid# 281474976715661 HANDLE EvClientConnected 2025-09-25T16:20:40.700690Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062528885849813:2564] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:40.700747Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062528885849813:2564] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:40.700760Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062528885849813:2564] txid# 281474976715661 SEND to# [59:7554062528885849741:2326] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-09-25T16:20:40.704599Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062528885848944:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.704616Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062528885848944:2144] TxId# 281474976715662 ProcessProposeTransaction 2025-09-25T16:20:40.704638Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062528885848944:2144] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7554062528885849837:2576] 2025-09-25T16:20:40.705608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062528885849837:2576] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:47858" 2025-09-25T16:20:40.705637Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062528885849837:2576] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.705642Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062528885849837:2576] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.705659Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062528885849837:2576] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.705826Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062528885849837:2576] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-09-25T16:20:40.705869Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062528885849837:2576] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.705888Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062528885849837:2576] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-09-25T16:20:40.705954Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062528885849837:2576] txid# 281474976715662 HANDLE EvClientConnected 2025-09-25T16:20:40.709716Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062528885849837:2576] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-09-25T16:20:40.709741Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062528885849837:2576] txid# 281474976715662 SEND to# [59:7554062528885849836:2319] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-09-25T16:20:40.711458Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062528885848944:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.711474Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062528885848944:2144] TxId# 281474976715663 ProcessProposeTransaction 2025-09-25T16:20:40.711486Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062528885848944:2144] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7554062528885849850:2585] 2025-09-25T16:20:40.712425Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062528885849850:2585] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51714" 2025-09-25T16:20:40.712451Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062528885849850:2585] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.712455Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062528885849850:2585] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.712474Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062528885849850:2585] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.712595Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062528885849850:2585] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.712631Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062528885849850:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.712644Z node 59 
:TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062528885849850:2585] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-09-25T16:20:40.712694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062528885849850:2585] txid# 281474976715663 HANDLE EvClientConnected 2025-09-25T16:20:40.712838Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:40.713648Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062528885849850:2585] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-09-25T16:20:40.713665Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062528885849850:2585] txid# 281474976715663 SEND to# [59:7554062528885849849:2332] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-09-25T16:20:40.724781Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062528885848944:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.724799Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062528885848944:2144] TxId# 281474976715664 ProcessProposeTransaction 2025-09-25T16:20:40.724834Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062528885848944:2144] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7554062528885849881:2599] 2025-09-25T16:20:40.725668Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062528885849881:2599] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51714" 2025-09-25T16:20:40.725694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062528885849881:2599] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.725698Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062528885849881:2599] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-09-25T16:20:40.725769Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062528885849881:2599] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:40.725782Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062528885849881:2599] txid# 281474976715664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-09-25T16:20:40.725798Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062528885849881:2599] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.725957Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062528885849881:2599] txid# 281474976715664 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.726001Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062528885849881:2599] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.726021Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062528885849881:2599] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-09-25T16:20:40.726086Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062528885849881:2599] txid# 281474976715664 HANDLE EvClientConnected 2025-09-25T16:20:40.727307Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062528885849881:2599] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-09-25T16:20:40.727327Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062528885849881:2599] txid# 281474976715664 SEND to# [59:7554062528885849880:2337] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:68:2058] recipient: [1:61:2103] Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:76:2058] recipient: [1:61:2103] 2025-09-25T16:20:35.418903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:35.418924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.418930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:35.418935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:35.418941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:35.418945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:35.418954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.418966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:35.419078Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:35.419164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:35.436436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:35.436464Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:35.438237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:35.438321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:35.438350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:35.439510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:35.439640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:35.439752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.439808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:35.439984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.440015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:35.440262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.440272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.440316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:35.440323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:35.440329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:35.440345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.440802Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:72:2107] sender: [1:152:2058] recipient: [1:16:2063] 2025-09-25T16:20:35.468076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:20:35.468174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:35.468247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:35.468351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:35.468371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:35.468601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:35.468700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:35.468716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:35.468722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:35.468797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:35.468907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.468921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.468929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:20:35.469704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:35.469786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:35.469836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:35.470049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.470071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 77 RawX2: 4294969407 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:35.470081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.470161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:35.470169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.470238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:35.470253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:35.470365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.470372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Des ... 
ble_stats.cpp:292: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-09-25T16:20:41.059363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:62: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2025-09-25T16:20:41.059369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-09-25T16:20:41.059375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:483: Do not want to split tablet 72075186233409552 SplitByLoadNotEnabledForTable 2025-09-25T16:20:41.059391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-09-25T16:20:41.069674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-09-25T16:20:41.069704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-09-25T16:20:41.069711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:588: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-09-25T16:20:41.091823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.091851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.091870Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.091875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.102093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.102127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.102149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.102155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.137011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.137040Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.137058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.137063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.147734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.147771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.147798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.147804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.178994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.179026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.179057Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.179062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.189509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.189546Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.189572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.189577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.220552Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.220585Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.220633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.220640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.230830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.230857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.230875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:281:2243], Recipient [1:281:2243]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.230878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.261972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.262002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.262019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [1:186:2180], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.262023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.272253Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:186:2180]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.272285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.272294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-09-25T16:20:41.272381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [1:189:2182], Recipient [1:186:2180]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-09-25T16:20:41.272389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:41.272394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:41.272416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:41.272422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:41.272450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-09-25T16:20:41.272458Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 35 s 2025-09-25T16:20:41.272679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:41.273346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [1:1732:3445], Recipient [1:186:2180]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:41.273360Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:41.273366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:41.273408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [1:173:2173], Recipient [1:186:2180]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:41.273414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:41.273418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::DropView >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> TSchemeShardViewTest::ReadOnlyMode >> TSchemeShardViewTest::EmptyQueryText [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 25703, msgbus: 64859 2025-09-25T16:20:01.085194Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062361933298810:2087];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:01.085217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002687/r3tmp/tmp69CzIC/pdisk_1.dat 2025-09-25T16:20:01.144034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:01.163049Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25703, node 1 2025-09-25T16:20:01.186540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:01.186582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:01.193133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:01.193151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:01.193154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2025-09-25T16:20:01.193222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:01.193594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64859 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:01.221162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062361933298995:2142] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:01.221189Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062361933299458:2439] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:01.221294Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062361933299458:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.229678Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062361933299458:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:01.231646Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062361933299458:2439] Handle TEvDescribeSchemeResult Forward to# [1:7554062361933299457:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } 
ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:01.236997Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062361933298995:2142] Handle TEvProposeTransaction 2025-09-25T16:20:01.237019Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062361933298995:2142] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:20:01.237060Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062361933298995:2142] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7554062361933299464:2444] 2025-09-25T16:20:01.258540Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062361933299464:2444] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:01.258599Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062361933299464:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:01.258605Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062361933299464:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:01.258632Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062361933299464:2444] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:01.258803Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062361933299464:2444] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.258838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062361933299464:2444] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:01.258853Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062361933299464:2444] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:20:01.258911Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062361933299464:2444] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:20:01.259220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:01.260427Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062361933299464:2444] txid# 281474976710657 Status StatusAccepted 
HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-09-25T16:20:01.260447Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062361933299464:2444] txid# 281474976710657 SEND to# [1:7554062361933299463:2443] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-09-25T16:20:01.265461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:20:01.290907Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062361933298995:2142] Handle TEvProposeTransaction 2025-09-25T16:20:01.290923Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062361933298995:2142] TxId# 281474976710658 ProcessProposeTransaction 2025-09-25T16:20:01.290937Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062361933298995:2142] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7554062361933299504:2480] 2025-09-25T16:20:01.291934Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062361933299504:2480] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:01.291956Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062361933299504:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:01.291961Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062361933299504:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:01.292007Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062361933299504:2480] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:01.292146Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062361933299504:2480] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.292189Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062361933299504:2480] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:01.292207Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062361933299504:2480] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-09-25T16:20:01.292263Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1 ... 
2057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.411657Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062529464863894:2576] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-09-25T16:20:40.411715Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062529464863894:2576] txid# 281474976715661 HANDLE EvClientConnected 2025-09-25T16:20:40.415139Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062529464863894:2576] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-09-25T16:20:40.415159Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062529464863894:2576] txid# 281474976715661 SEND to# [59:7554062529464863893:2318] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-09-25T16:20:40.472231Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062525169895823:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.472244Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062525169895823:2144] TxId# 281474976715662 ProcessProposeTransaction 2025-09-25T16:20:40.472257Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062525169895823:2144] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7554062529464863917:2593] 2025-09-25T16:20:40.472943Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062529464863917:2593] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60368" 2025-09-25T16:20:40.472961Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062529464863917:2593] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.472968Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062529464863917:2593] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.472990Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062529464863917:2593] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.473137Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062529464863917:2593] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.473193Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062529464863917:2593] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.473209Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062529464863917:2593] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-09-25T16:20:40.473263Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062529464863917:2593] txid# 281474976715662 HANDLE EvClientConnected 2025-09-25T16:20:40.473420Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:40.477772Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062529464863917:2593] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-09-25T16:20:40.477797Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062529464863917:2593] txid# 281474976715662 SEND to# [59:7554062529464863916:2332] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-09-25T16:20:40.485385Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062525169895823:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.485405Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062525169895823:2144] TxId# 281474976715663 ProcessProposeTransaction 2025-09-25T16:20:40.485425Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062525169895823:2144] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7554062529464863949:2611] 2025-09-25T16:20:40.486279Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062529464863949:2611] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:53426" 2025-09-25T16:20:40.486304Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062529464863949:2611] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.486309Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062529464863949:2611] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:40.486325Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062529464863949:2611] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.486448Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062529464863949:2611] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.486486Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062529464863949:2611] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:40.486507Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062529464863949:2611] txid# 281474976715663 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-09-25T16:20:40.486561Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062529464863949:2611] txid# 281474976715663 HANDLE EvClientConnected 2025-09-25T16:20:40.489774Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062529464863949:2611] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-09-25T16:20:40.489795Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062529464863949:2611] txid# 281474976715663 SEND to# [59:7554062529464863948:2334] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-09-25T16:20:40.500513Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062525169895823:2144] Handle TEvProposeTransaction 2025-09-25T16:20:40.500529Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062525169895823:2144] TxId# 281474976715664 ProcessProposeTransaction 2025-09-25T16:20:40.500554Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062525169895823:2144] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7554062529464863976:2623] 2025-09-25T16:20:40.501466Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062529464863976:2623] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MCwiaWF0IjoxNzU4ODE3MjQwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.XLpHgldq-6duTxvMxxtgnmJwBGzknDgvtN-815JIuaHwdeVVAdYXJ9v8YEydpy_ao80GhPQGwFyLssVr74FJFoPgaqMJT7nAxSeXNV3FDtwfI15tx9aJnQogIWaLDslr-Urb7T15bmP7Utn3xu5_oqVBbSHlrHJIEBGzVCt8jGZ3-0jcx-ZdxFg2ra8_PV1d03y0amSVvp4MuG01bRzcZbYDkhtBAb4tdMGBbmL5thKCUyWzH-u_2q0-PuSA-suBUBaQyQlWKFO4Xb47M6VtJNdtKdi0PKCGK8v2prjctAv0g3eHDydtsXr1hl1WJW02stHTmArrLTUe64qMCIjm7w\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MCwiaWF0IjoxNzU4ODE3MjQwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:53426" 2025-09-25T16:20:40.501500Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062529464863976:2623] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:40.501505Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062529464863976:2623] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-09-25T16:20:40.501586Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062529464863976:2623] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:40.501606Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062529464863976:2623] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:40.501621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062529464863976:2623] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:40.501720Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062529464863976:2623] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:40.501732Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062529464863976:2623] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-09-25T16:20:40.501761Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062529464863976:2623] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-09-25T16:20:40.501781Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062529464863976:2623] txid# 281474976715664 SEND to# [59:7554062529464863975:2345] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:40.501890Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=NDdjYzc2MGQtNzhiODU5NDQtNWY2OWNmYmUtYTJmMmQ2NDY=, ActorId: [59:7554062529464863966:2345], ActorState: ExecuteState, TraceId: 01k60tthdhfnarjjzdrhebrb2z, Create QueryResponse for error on request, msg: 2025-09-25T16:20:40.501982Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062525169895823:2144] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:40.501991Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062525169895823:2144] TxId# 281474976715665 ProcessProposeKqpTransaction >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TSchemeShardViewTest::CreateView >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> TSchemeShardViewTest::DropView [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:41.875402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:41.875429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:41.875436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:41.875441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:41.875448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:41.875452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:41.875461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:41.875474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:41.875600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:41.875680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:41.887481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:41.887503Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:41.890954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:41.891054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:41.891099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:41.892944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:41.893017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:41.893141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:41.893199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:41.893624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:41.893662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:41.893886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:41.893893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:41.893908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:41.893913Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:41.893917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:41.893941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.895037Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:41.912540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:41.912624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.912686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:41.912694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:41.912757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:41.912772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:41.913548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:41.913586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:41.913631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.913639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:41.913644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:41.913648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:41.914042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.914052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:41.914058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:41.914352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.914360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.914364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:41.914369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:41.914948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:41.915349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:41.915383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:41.915543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:41.915563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:41.915568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:41.915637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:41.915643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:41.915667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:41.915677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:41.916089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:41.916096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ctor# [1:275:2265] Become StateWork (SchemeCache [1:280:2270]) 2025-09-25T16:20:41.918370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:41.918419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-09-25T16:20:41.918428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-09-25T16:20:41.918453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:20:41.918469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-09-25T16:20:41.918475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 2] source path: 2025-09-25T16:20:41.918481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:41.918697Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:275:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:41.919317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-09-25T16:20:41.919362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-09-25T16:20:41.919395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.919402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-09-25T16:20:41.919410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-09-25T16:20:41.919433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:41.919515Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:20:41.919878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-09-25T16:20:41.919908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-09-25T16:20:41.919978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:41.919998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:41.920006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-09-25T16:20:41.920031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 128 -> 240 2025-09-25T16:20:41.920056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:41.920065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:20:41.920439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:41.920448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:41.920486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:41.920503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:41.920508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-09-25T16:20:41.920513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-09-25T16:20:41.920567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.920573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:20:41.920585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:41.920589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:41.920594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:41.920597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:41.920601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-09-25T16:20:41.920609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:41.920614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:20:41.920619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:20:41.920629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:41.920636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-09-25T16:20:41.920640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-09-25T16:20:41.920643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-09-25T16:20:41.920771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:41.920782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:41.920787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:41.920792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-09-25T16:20:41.920796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-09-25T16:20:41.920948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:41.920961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:41.920964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:41.920968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:20:41.920972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:41.920981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:20:41.921789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:20:41.922008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:42.003206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:42.003246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.003252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:42.003258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:42.003265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:42.003270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:42.003279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.003292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:42.003425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:42.003502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:42.020173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:42.020200Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:42.023848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:42.023921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:42.023964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:42.025911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:42.025984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:42.026098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.026162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:42.026654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.026696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:42.027015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.027027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.027048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:42.027058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.027067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:42.027103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.028581Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.050815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:42.050939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.051010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:42.051019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:42.051103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.051119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:42.052213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.052270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:42.052337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.052348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:42.052355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:42.052360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:42.052860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.052874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:42.052881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:42.053310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.053325Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.053335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.053343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:42.054071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.054483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:42.054543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:42.054775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.054802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.054810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.054879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:42.054887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.054919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.054931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:42.055395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.055404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
T_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.067844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:42.067848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:20:42.067853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:20:42.067861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:20:42.068432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:20:42.068450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-09-25T16:20:42.068518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:42.068526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-09-25T16:20:42.068543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:42.068547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:20:42.068556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:20:42.068560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:20:42.068648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:42.068665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.068670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:336:2326] 2025-09-25T16:20:42.068720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:20:42.068731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got 
EvNotifyTxCompletionResult 2025-09-25T16:20:42.068734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:336:2326] 2025-09-25T16:20:42.068749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:42.068759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.068763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2326] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 101 2025-09-25T16:20:42.068856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.068885Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 59us result status StatusSuccess 2025-09-25T16:20:42.069003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-09-25T16:20:42.069071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.069090Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 21us result status StatusSuccess 2025-09-25T16:20:42.069148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.069201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.069214Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 15us result status StatusSuccess 2025-09-25T16:20:42.069254Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:41.955201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:41.955230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:41.955236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:41.955241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:41.955248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:41.955252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:41.955261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:41.955273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-09-25T16:20:41.955391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:41.955454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:41.972408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:41.972432Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:41.982607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:41.982806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:41.982847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:41.984786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:41.984874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:41.984998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:41.985062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:41.985585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:41.985626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:41.985925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:41.985936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:41.985958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:41.985967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:41.985972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:41.986001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:41.987229Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.008281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:42.008366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.008422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:42.008428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:42.008483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.008494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:42.011655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.011714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:42.011769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.011779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:42.011783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:42.011787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:42.012305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.012315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:42.012322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:42.012625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.012633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.012637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.012642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:42.013382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.013870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:42.013930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:42.014095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.014116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.014122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.014176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:42.014181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.014206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.014214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:42.014598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.014605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
7594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.030920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-09-25T16:20:42.030958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-09-25T16:20:42.031026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.031042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.031048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-09-25T16:20:42.031074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-09-25T16:20:42.031103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.031111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:20:42.031661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.031668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.031698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:42.031721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.031734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-09-25T16:20:42.031738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:20:42.031747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.031752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:20:42.031763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:42.031766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.031772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:42.031774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.031777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:20:42.031781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.031784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:20:42.031787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:20:42.031796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:42.031800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:20:42.031803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-09-25T16:20:42.031805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-09-25T16:20:42.032013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.032024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.032028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:42.032032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:20:42.032035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:42.032254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.032263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.032266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:42.032269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-09-25T16:20:42.032271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:42.032282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:20:42.032325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:20:42.032332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:20:42.032343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.032790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:20:42.033129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:20:42.033165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:20:42.033217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:42.033224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:20:42.033300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:42.033321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.033326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:329:2319] TestWaitNotification: OK eventTxId 102 2025-09-25T16:20:42.033424Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.033455Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 43us result status StatusPathDoesNotExist 2025-09-25T16:20:42.033500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 18102, msgbus: 3487 2025-09-25T16:20:01.221700Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062358130874738:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:01.221815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:20:01.226309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002631/r3tmp/tmpODx2Qf/pdisk_1.dat 2025-09-25T16:20:01.278636Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18102, node 1 2025-09-25T16:20:01.321051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:01.321064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:01.321066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:01.321116Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:01.322473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:01.322495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:01.324871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:01.325392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3487 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:01.341835Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062358130874865:2143] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:01.341860Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062358130875347:2444] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:01.342052Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062358130875347:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.351476Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062358130875347:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:01.354584Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062358130875347:2444] Handle TEvDescribeSchemeResult Forward to# [1:7554062358130875346:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 
SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:01.357586Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062358130874865:2143] Handle TEvProposeTransaction 2025-09-25T16:20:01.357598Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062358130874865:2143] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:20:01.357623Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062358130874865:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7554062358130875353:2449] 2025-09-25T16:20:01.371465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062358130875353:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:01.371511Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062358130875353:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:01.371516Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062358130875353:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:01.371531Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062358130875353:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:01.371663Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062358130875353:2449] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.371692Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062358130875353:2449] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:01.371705Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062358130875353:2449] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:20:01.371760Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062358130875353:2449] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:20:01.371997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:01.372996Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062358130875353:2449] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 
281474976710657} 2025-09-25T16:20:01.373017Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062358130875353:2449] txid# 281474976710657 SEND to# [1:7554062358130875352:2448] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-09-25T16:20:01.401398Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062358130874865:2143] Handle TEvProposeTransaction 2025-09-25T16:20:01.401413Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062358130874865:2143] TxId# 281474976710658 ProcessProposeTransaction 2025-09-25T16:20:01.401425Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062358130874865:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7554062358130875391:2483] 2025-09-25T16:20:01.402244Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062358130875391:2483] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:01.402266Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062358130875391:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:01.402269Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062358130875391:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:01.402287Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062358130875391:2483] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:01.402406Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062358130875391:2483] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:01.402442Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062358130875391:2483] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:01.402460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062358130875391:2483] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-09-25T16:20:01.402510Z node 1 ... 
dyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:41.373317Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062533899100088:2553] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:41.373326Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062533899100088:2553] txid# 281474976710660 SEND to# [59:7554062533899100017:2326] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-09-25T16:20:41.376094Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062529604132061:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.376105Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062529604132061:2144] TxId# 281474976710661 ProcessProposeTransaction 2025-09-25T16:20:41.376119Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062529604132061:2144] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7554062533899100112:2565] 2025-09-25T16:20:41.376789Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062533899100112:2565] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60500" 2025-09-25T16:20:41.376805Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062533899100112:2565] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.376810Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062533899100112:2565] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:41.376831Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062533899100112:2565] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.376929Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062533899100112:2565] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.376969Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062533899100112:2565] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:41.376986Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062533899100112:2565] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 
72057594046644480} 2025-09-25T16:20:41.377020Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062533899100112:2565] txid# 281474976710661 HANDLE EvClientConnected 2025-09-25T16:20:41.379865Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062533899100112:2565] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-09-25T16:20:41.379886Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062533899100112:2565] txid# 281474976710661 SEND to# [59:7554062533899100111:2317] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-09-25T16:20:41.416392Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062529604132061:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.416414Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062529604132061:2144] TxId# 281474976710662 ProcessProposeTransaction 2025-09-25T16:20:41.416434Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062529604132061:2144] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7554062533899100135:2582] 2025-09-25T16:20:41.417455Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062533899100135:2582] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:58060" 2025-09-25T16:20:41.417485Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062533899100135:2582] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.417490Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062533899100135:2582] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:41.417508Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062533899100135:2582] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.417705Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062533899100135:2582] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.417801Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062533899100135:2582] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:41.417824Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062533899100135:2582] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-09-25T16:20:41.417884Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062533899100135:2582] txid# 281474976710662 HANDLE EvClientConnected 2025-09-25T16:20:41.418091Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:41.419302Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062533899100135:2582] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-09-25T16:20:41.419328Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062533899100135:2582] txid# 281474976710662 SEND to# [59:7554062533899100134:2332] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-09-25T16:20:41.428941Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062529604132061:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.428964Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062529604132061:2144] TxId# 281474976710663 ProcessProposeTransaction 2025-09-25T16:20:41.428991Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062529604132061:2144] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7554062533899100176:2606] 2025-09-25T16:20:41.429985Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062533899100176:2606] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MSwiaWF0IjoxNzU4ODE3MjQxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.I6tox_hSZB-Y2EWc67wFg41GTmxp4MkNnjWI4k7DlPYgC3F1RZVaomuCE6LeYF54YKGBZLjYZemE3cpzuHK3iRdqGvxaHbkQcU0J42o6YwWb33QfFabbxDdwoZxy4tyYOu8Nn8H2DXiClasTDhtJJ3AOe80NdFsImhFWcO3EINOgdpYbiFmdPLg8u0uJumSLR8SS_gPMGkJUqwXqxCKv6EE26vp3NnfN0VtkcbbUrt4gDK8hsSa7G05D8HazVPX7zQnfZd8uDvuySnFfcP-DbI1zMxpfodP2gQIwtreeUY57Rc8TfiACdoMQrpkiz1_nXUk7a5MTt48HXx9YF4n1ww\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MSwiaWF0IjoxNzU4ODE3MjQxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:58060" 2025-09-25T16:20:41.430020Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062533899100176:2606] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.430026Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062533899100176:2606] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-09-25T16:20:41.430113Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062533899100176:2606] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:41.430129Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062533899100176:2606] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:41.430142Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062533899100176:2606] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.430235Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062533899100176:2606] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.430241Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062533899100176:2606] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-09-25T16:20:41.430272Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062533899100176:2606] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-09-25T16:20:41.430279Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062533899100176:2606] txid# 281474976710663 SEND to# [59:7554062533899100175:2337] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:41.430365Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=OWJlNjU1N2ItYmZhY2NiYTItZTdlNGQ5MjQtNzI4YTc1MzE=, ActorId: [59:7554062533899100161:2337], ActorState: ExecuteState, TraceId: 01k60ttjah08cyaxe6h7qy0755, Create QueryResponse for error on request, msg: 2025-09-25T16:20:41.430446Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062529604132061:2144] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:41.430456Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062529604132061:2144] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:32.401215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:32.401246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.401250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:32.401255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:32.401260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:32.401264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:32.401271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:32.401282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-09-25T16:20:32.401383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:32.401444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:32.414515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:32.414538Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:32.418249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:32.418325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:32.418352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:32.419789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:32.419851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:32.419965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.420029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:32.420415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.420455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:32.420689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.420696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:32.420714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:32.420719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:32.420724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:32.420750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.421970Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:32.441027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:32.441125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.441181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:32.441188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:32.441255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:32.441272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:32.442057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.442106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:32.442169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.442181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:32.442187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:32.442193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:32.442645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.442658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:32.442664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:32.443031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.443042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:32.443048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.443057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:32.443839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:32.444240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:32.444277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:32.444453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:32.444522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:32.444531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.444594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:32.444600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:32.444630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:32.444645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:32.445213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:32.445224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ecipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 100 Completed: false Progress10k: 0 2025-09-25T16:20:40.705165Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:40.705169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:40.705177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:40.705183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:646: TTxCompleteShredBSC Unknown generation#100, Expected gen# 52 at schemeshard: 72057594046678944 2025-09-25T16:20:40.705196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-09-25T16:20:40.705237Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 0 2025-09-25T16:20:40.705241Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:40.705245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:40.705249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:40.705254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:40.705588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:40.705599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:40.705608Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:41.308600Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.308648Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.308670Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.308677Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.318918Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.318951Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.318979Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.318984Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.318998Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.319003Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.319020Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:953:2817], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.319023Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.390503Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.390547Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.390556Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-09-25T16:20:41.390674Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 5000 2025-09-25T16:20:41.390683Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:41.390688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:41.390716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:41.390728Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:41.390748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:41.390759Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:41.832970Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.833006Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.833022Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.833027Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.843246Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.843291Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.843310Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.843313Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:41.843325Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.843329Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.843349Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:953:2817], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.843353Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:41.914914Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.914947Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:41.914954Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-09-25T16:20:41.915024Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: true Progress10k: 10000 2025-09-25T16:20:41.915030Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:41.915034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:41.915057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:41.915061Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 
2025-09-25T16:20:41.915065Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 101, duration# 2 s 2025-09-25T16:20:41.915883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:41.916037Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [2:4054:5341], Recipient [2:295:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:41.916044Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:41.916047Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:41.916068Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [2:3210:4667], Recipient [2:295:2279]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:41.916073Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:41.916076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 7954, msgbus: 20852 2025-09-25T16:20:04.771540Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062373927725178:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:04.771789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002537/r3tmp/tmpE5z0q9/pdisk_1.dat 2025-09-25T16:20:04.815544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:04.826377Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7954, node 1 2025-09-25T16:20:04.846936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:04.846962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:04.846964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:04.847009Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20852 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-09-25T16:20:04.865424Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062373927725333:2137] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:04.865448Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062373927725788:2426] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:04.865603Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062373927725788:2426] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.875556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:04.875588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:04.876104Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062373927725788:2426] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:04.878217Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062373927725788:2426] Handle TEvDescribeSchemeResult Forward to# [1:7554062373927725787:2425] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 
18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:04.882008Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062373927725333:2137] Handle TEvProposeTransaction 2025-09-25T16:20:04.882021Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:326: actor# [1:7554062373927725333:2137] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-09-25T16:20:04.883655Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:7554062373927725333:2137] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:04.885691Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:20:04.885709Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062373927725333:2137] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:04.885739Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062373927725333:2137] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062373927725803:2439] 2025-09-25T16:20:04.885840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:04.898797Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062373927725803:2439] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.898843Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062373927725803:2439] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.898848Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062373927725803:2439] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.898862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062373927725803:2439] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.898987Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062373927725803:2439] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.899012Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062373927725803:2439] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:04.899022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062373927725803:2439] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:04.899060Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# 
[1:7554062373927725803:2439] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:04.899255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:04.899998Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062373927725803:2439] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:04.900012Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062373927725803:2439] txid# 281474976715657 SEND to# [1:7554062373927725798:2434] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-09-25T16:20:04.964881Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062373927725333:2137] Handle TEvProposeTransaction 2025-09-25T16:20:04.964908Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062373927725333:2137] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:04.964921Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062373927725333:2137] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062373927725843:2475] 2025-09-25T16:20:04.965624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062373927725843:2475] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.965649Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062373927725843:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.965652Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062373927725843:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.965670Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062373927725843:2475] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.965785Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062373927725843:2475] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.965823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062373927725843:2475] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 720575940 ... 
TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062532801749301:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.635550Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062532801749301:2144] TxId# 281474976710661 ProcessProposeTransaction 2025-09-25T16:20:41.635568Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062532801749301:2144] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7554062532801750066:2565] 2025-09-25T16:20:41.636328Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062532801750066:2565] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-09-25T16:20:41.636349Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062532801750066:2565] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.636352Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062532801750066:2565] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-09-25T16:20:41.636439Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062532801750066:2565] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:41.636452Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062532801750066:2565] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:41.636603Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [59:7554062532801750066:2565] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-09-25T16:20:41.636618Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062532801750066:2565] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.636660Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062532801750066:2565] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.636711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062532801750066:2565] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-09-25T16:20:41.636736Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062532801750066:2565] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-09-25T16:20:41.636810Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062532801750066:2565] txid# 281474976710661 HANDLE EvClientConnected 2025-09-25T16:20:41.637857Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062532801750066:2565] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:41.637897Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062532801750066:2565] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:41.637907Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062532801750066:2565] txid# 281474976710661 SEND to# [59:7554062532801749994:2326] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-09-25T16:20:41.640912Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062532801749301:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.640927Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062532801749301:2144] TxId# 281474976710662 ProcessProposeTransaction 2025-09-25T16:20:41.640944Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062532801749301:2144] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7554062532801750090:2577] 2025-09-25T16:20:41.641791Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062532801750090:2577] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:41152" 2025-09-25T16:20:41.641816Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062532801750090:2577] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.641821Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062532801750090:2577] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:41.641834Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062532801750090:2577] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.641950Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062532801750090:2577] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-09-25T16:20:41.641989Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062532801750090:2577] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:41.642010Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062532801750090:2577] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-09-25T16:20:41.642075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062532801750090:2577] txid# 281474976710662 HANDLE EvClientConnected 2025-09-25T16:20:41.645256Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062532801750090:2577] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-09-25T16:20:41.645277Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062532801750090:2577] txid# 281474976710662 SEND to# [59:7554062532801750089:2318] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-09-25T16:20:41.652335Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062532801749301:2144] Handle TEvProposeTransaction 2025-09-25T16:20:41.652354Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062532801749301:2144] TxId# 281474976710663 ProcessProposeTransaction 2025-09-25T16:20:41.652377Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062532801749301:2144] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7554062532801750122:2591] 2025-09-25T16:20:41.653221Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062532801750122:2591] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:41152" 2025-09-25T16:20:41.653244Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062532801750122:2591] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.653249Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062532801750122:2591] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-09-25T16:20:41.653313Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062532801750122:2591] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:41.653340Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062532801750122:2591] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:41.653364Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062532801750122:2591] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.653515Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062532801750122:2591] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.653530Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062532801750122:2591] txid# 281474976710663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-09-25T16:20:41.653575Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062532801750122:2591] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-09-25T16:20:41.653585Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062532801750122:2591] txid# 281474976710663 SEND to# [59:7554062532801750121:2336] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:41.653682Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=NGQ5OGJkNmEtOTAzOTQxMDMtMzkyNDdmLTcyMGNmMjUx, ActorId: [59:7554062532801750107:2336], ActorState: ExecuteState, TraceId: 01k60ttjhha9qyge17cwhk9prp, Create QueryResponse for error on request, msg: 2025-09-25T16:20:41.653789Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062532801749301:2144] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:41.653801Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062532801749301:2144] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:42.111557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:42.111589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.111596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:42.111602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:42.111624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:42.111629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:42.111639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.111654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:42.111804Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:42.111874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:42.128491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:42.128523Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:42.133272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:42.133377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:42.133431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:42.134987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:42.135057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:42.135171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.135239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:42.135694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.135740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:42.136052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.136063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.136088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:42.136096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.136102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:42.136140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.137628Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.161336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:20:42.161443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.161525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:42.161535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:42.161608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.161624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:42.162521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.162577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:42.162645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.162658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:42.162664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:42.162670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:42.163128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.163142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:42.163151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:42.163517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.163529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.163535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.163543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:42.164306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.164748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:42.164813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:42.165077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.165103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.165111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.165185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:42.165193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.165228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.165241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:42.165714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.165725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:42.280525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:386:2355] sender: [1:444:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.313113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:42.313183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-09-25T16:20:42.313195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-09-25T16:20:42.313223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-09-25T16:20:42.313241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-09-25T16:20:42.313247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 3] source path: 2025-09-25T16:20:42.313255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.314222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-09-25T16:20:42.314269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-09-25T16:20:42.314313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.314320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-09-25T16:20:42.314327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-09-25T16:20:42.314350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.314745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send 
tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-09-25T16:20:42.314783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-09-25T16:20:42.314906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.314922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.314931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-09-25T16:20:42.314952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 128 -> 240 2025-09-25T16:20:42.314979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:42.314990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-09-25T16:20:42.315382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.315391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.315438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-09-25T16:20:42.315459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.315464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:435:2393], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-09-25T16:20:42.315470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:435:2393], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-09-25T16:20:42.317276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.317296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-09-25T16:20:42.317316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:20:42.317321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:20:42.317328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-09-25T16:20:42.317332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:20:42.317337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-09-25T16:20:42.317344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-09-25T16:20:42.317350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-09-25T16:20:42.317355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 103:0 2025-09-25T16:20:42.317373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:20:42.317384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-09-25T16:20:42.317389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-09-25T16:20:42.317393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:20:42.317607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:20:42.317627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:20:42.317634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-09-25T16:20:42.317640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:20:42.317648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:20:42.317802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:20:42.317815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-09-25T16:20:42.317821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-09-25T16:20:42.317825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:20:42.317830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:20:42.317842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-09-25T16:20:42.318862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-09-25T16:20:42.318895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:42.276069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:42.276093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.276099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:42.276105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:42.276112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:42.276117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:42.276127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.276140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:42.276233Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:42.276280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:42.291769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:42.291788Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:42.296014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:42.296048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:42.296087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:42.297644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:42.297700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:42.297781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.297825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:42.298130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.298158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:42.298360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.298367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.298384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:42.298389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.298393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:42.298442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.299711Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.322030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-09-25T16:20:42.322104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.322161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:42.322167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:42.322241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.322252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:42.323075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.323133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:42.323198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.323209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:42.323215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:42.323220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:42.324030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.324047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:42.324053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:42.324492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.324501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.324505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.324510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:42.325189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.325633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:42.325681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:42.325923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.325953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.325961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.326041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:42.326049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.326085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.326099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:42.326655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.326666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
0:42.335571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:42.335994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.336006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.336042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:42.336065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.336071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-09-25T16:20:42.336076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:20:42.336146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.336154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:20:42.336168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:42.336173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.336178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:20:42.336181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.336186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:20:42.336192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:20:42.336197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:20:42.336202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:20:42.336214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:42.336220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:20:42.336224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-09-25T16:20:42.336228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-09-25T16:20:42.336366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.336380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.336386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:42.336391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-09-25T16:20:42.336396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:42.336539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.336553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:20:42.336560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:20:42.336565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-09-25T16:20:42.336570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:42.336585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:20:42.336762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:20:42.336772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-09-25T16:20:42.336784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.337184Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:20:42.337561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:20:42.337584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2025-09-25T16:20:42.337649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:20:42.337657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:20:42.337673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:20:42.337677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-09-25T16:20:42.337685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-09-25T16:20:42.337689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-09-25T16:20:42.337777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:20:42.337798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.337803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:335:2325] 2025-09-25T16:20:42.337833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:20:42.337847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.337851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:335:2325] 2025-09-25T16:20:42.337858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-09-25T16:20:42.337870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.337873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:335:2325] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-09-25T16:20:42.337945Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.337972Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 34us result status StatusPathDoesNotExist 2025-09-25T16:20:42.338013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:20:42.203370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:42.203399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.203404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:42.203409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:42.203414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:42.203417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:42.203424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:42.203435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:42.203526Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:42.203626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:42.216773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:42.216796Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:42.225424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:42.225471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:42.225513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:42.233505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:42.233608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:42.233772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.233938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:42.240550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.240621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:42.241006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.241022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.241052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:42.241062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.241068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:42.241096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.244405Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:20:42.273337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-09-25T16:20:42.273432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.273520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:42.273530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:42.273614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:42.273632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:42.277711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.277784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:42.277857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.277871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:42.277877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:42.277884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:42.281945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.281969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:42.281977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:42.283511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.283526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.283533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.283542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-09-25T16:20:42.284495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:42.285232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:42.285279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:42.285535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.285567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.285579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.285664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:42.285675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:42.285711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.285725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:42.286243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.286253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
.290555Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:20:42.290839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-09-25T16:20:42.290868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-09-25T16:20:42.290937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:42.290957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:42.290965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-09-25T16:20:42.290991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 128 -> 240 2025-09-25T16:20:42.291017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:42.291028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-09-25T16:20:42.291378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:42.291386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:42.291437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:20:42.291459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:42.291465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-09-25T16:20:42.291470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-09-25T16:20:42.291549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 
101:0, at schemeshard: 72057594046678944 2025-09-25T16:20:42.291557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-09-25T16:20:42.291570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:42.291575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:42.291581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-09-25T16:20:42.291584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:42.291589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-09-25T16:20:42.291596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-09-25T16:20:42.291600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-09-25T16:20:42.291605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 101:0 2025-09-25T16:20:42.291629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:20:42.291635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-09-25T16:20:42.291640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-09-25T16:20:42.291643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-09-25T16:20:42.291766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:42.291778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:42.291783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:42.291789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-09-25T16:20:42.291796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:20:42.291860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:42.291869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:20:42.291873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:20:42.291877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-09-25T16:20:42.291882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-09-25T16:20:42.291890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:20:42.292491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:20:42.292514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:20:42.292561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:20:42.292570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:20:42.292624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:20:42.292644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:20:42.292649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2291] TestWaitNotification: OK eventTxId 101 2025-09-25T16:20:42.292725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:20:42.292754Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 38us result status StatusSuccess 2025-09-25T16:20:42.292870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> EvWrite::WriteWithSplit >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::WriteExoticTypes >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 11087, msgbus: 22565 2025-09-25T16:20:04.143830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062375001452637:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:04.143867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00257d/r3tmp/tmp7ASQ0z/pdisk_1.dat 2025-09-25T16:20:04.156985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:20:04.207938Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11087, node 1 2025-09-25T16:20:04.227702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:04.227715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:04.227717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:04.227772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:04.243945Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:04.243984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:04.245530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22565 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:04.248691Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062375001452760:2143] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:04.248713Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062375001453216:2440] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:04.285681Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062375001453216:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.286076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:04.294672Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062375001453216:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:04.296245Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062375001453216:2440] Handle TEvDescribeSchemeResult Forward to# [1:7554062375001453215:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:04.300318Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062375001452760:2143] Handle TEvProposeTransaction 2025-09-25T16:20:04.300335Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062375001452760:2143] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:04.300379Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062375001452760:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062375001453253:2452] 2025-09-25T16:20:04.311535Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062375001453253:2452] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.311587Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062375001453253:2452] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.311593Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062375001453253:2452] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.311612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062375001453253:2452] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.311776Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062375001453253:2452] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.311832Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062375001453253:2452] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:04.311863Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062375001453253:2452] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:04.311930Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062375001453253:2452] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:04.312174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:04.314923Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062375001453253:2452] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:04.314943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062375001453253:2452] txid# 281474976715657 SEND to# [1:7554062375001453252:2451] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-09-25T16:20:04.325751Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062375001452760:2143] Handle TEvProposeTransaction 2025-09-25T16:20:04.325767Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062375001452760:2143] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:04.325782Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062375001452760:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062375001453293:2488] 2025-09-25T16:20:04.326538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062375001453293:2488] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.326561Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062375001453293:2488] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.326564Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062375001453293:2488] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.326596Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062375001453293:2488] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.326704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062375001453293:2488] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.326748Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062375001453293:2488] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:04.326767Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062375001453293:2488] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-09-25T16:20:04.326827Z node ... 
ROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062530370128402:2142] Handle TEvProposeTransaction 2025-09-25T16:20:41.913193Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062530370128402:2142] TxId# 281474976715661 ProcessProposeTransaction 2025-09-25T16:20:41.913222Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062530370128402:2142] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7554062530370129161:2570] 2025-09-25T16:20:41.914272Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062530370129161:2570] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-09-25T16:20:41.914299Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062530370129161:2570] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.914304Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062530370129161:2570] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-09-25T16:20:41.914390Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062530370129161:2570] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:41.914416Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062530370129161:2570] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:41.914583Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [59:7554062530370129161:2570] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-09-25T16:20:41.914633Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062530370129161:2570] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.914683Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062530370129161:2570] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.914733Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062530370129161:2570] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-09-25T16:20:41.914751Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062530370129161:2570] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-09-25T16:20:41.914807Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062530370129161:2570] txid# 281474976715661 HANDLE EvClientConnected 2025-09-25T16:20:41.915757Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062530370129161:2570] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:41.915793Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062530370129161:2570] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:41.915798Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062530370129161:2570] txid# 281474976715661 SEND to# [59:7554062530370129081:2326] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-09-25T16:20:41.918693Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062530370128402:2142] Handle TEvProposeTransaction 2025-09-25T16:20:41.918706Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062530370128402:2142] TxId# 281474976715662 ProcessProposeTransaction 2025-09-25T16:20:41.918720Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062530370128402:2142] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7554062530370129185:2582] 2025-09-25T16:20:41.919383Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062530370129185:2582] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35246" 2025-09-25T16:20:41.919400Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062530370129185:2582] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.919404Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062530370129185:2582] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:41.919417Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062530370129185:2582] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.919539Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062530370129185:2582] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-09-25T16:20:41.919587Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062530370129185:2582] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:41.919621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062530370129185:2582] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-09-25T16:20:41.919680Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062530370129185:2582] txid# 281474976715662 HANDLE EvClientConnected 2025-09-25T16:20:41.923329Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062530370129185:2582] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-09-25T16:20:41.923346Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062530370129185:2582] txid# 281474976715662 SEND to# [59:7554062530370129184:2319] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-09-25T16:20:41.930199Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062530370128402:2142] Handle TEvProposeTransaction 2025-09-25T16:20:41.930214Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062530370128402:2142] TxId# 281474976715663 ProcessProposeTransaction 2025-09-25T16:20:41.930230Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062530370128402:2142] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7554062530370129217:2596] 2025-09-25T16:20:41.931073Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062530370129217:2596] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35246" 2025-09-25T16:20:41.931097Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062530370129217:2596] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:41.931102Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062530370129217:2596] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-09-25T16:20:41.931157Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062530370129217:2596] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:41.931176Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062530370129217:2596] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:41.931191Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062530370129217:2596] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:41.931280Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062530370129217:2596] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:41.931290Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062530370129217:2596] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-09-25T16:20:41.931315Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062530370129217:2596] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-09-25T16:20:41.931324Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062530370129217:2596] txid# 281474976715663 SEND to# [59:7554062530370129216:2336] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:41.931494Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=ZjMwZDIzNjAtOGJjY2EyN2EtNDQ2N2RmMmItMWFkYjQyOTI=, ActorId: [59:7554062530370129202:2336], ActorState: ExecuteState, TraceId: 01k60ttjt71mkm2rfcayeftkm8, Create QueryResponse for error on request, msg: 2025-09-25T16:20:41.931574Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062530370128402:2142] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:41.931583Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062530370128402:2142] TxId# 281474976715664 ProcessProposeKqpTransaction >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TImportWithRebootsTests::ShouldSucceedOnTableWithChecksum [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnSingleView >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TColumnShardTestReadWrite::WriteStandalone >> Normalizers::RemoveWriteIdNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TestShred::Run3CyclesForTopics [GOOD] >> TColumnShardTestReadWrite::CompactionGC |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |82.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 25456, msgbus: 3573 2025-09-25T16:20:03.877530Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062366616805393:2152];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:03.878697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:20:03.878832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0025a3/r3tmp/tmpMnrvuk/pdisk_1.dat 2025-09-25T16:20:03.946090Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25456, node 1 2025-09-25T16:20:03.976760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:03.976796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:03.985380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:03.992839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:03.997120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:03.997136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:03.997139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:03.997191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3573 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-09-25T16:20:04.019020Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062366616805505:2124] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:04.019042Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062370911773345:2453] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:04.019174Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062370911773345:2453] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.027924Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062370911773345:2453] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:04.030516Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062370911773345:2453] Handle TEvDescribeSchemeResult Forward to# [1:7554062370911773344:2452] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:04.034638Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062366616805505:2124] Handle TEvProposeTransaction 2025-09-25T16:20:04.034652Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062366616805505:2124] TxId# 281474976710657 ProcessProposeTransaction 2025-09-25T16:20:04.034689Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062366616805505:2124] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7554062370911773351:2458] 2025-09-25T16:20:04.049708Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062370911773351:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.049768Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062370911773351:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.049774Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062370911773351:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.049794Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062370911773351:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.049954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062370911773351:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.049990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062370911773351:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:04.050005Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062370911773351:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-09-25T16:20:04.050064Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062370911773351:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-09-25T16:20:04.050408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:04.051933Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062370911773351:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-09-25T16:20:04.051955Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062370911773351:2458] txid# 281474976710657 SEND to# [1:7554062370911773350:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-09-25T16:20:04.056027Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062366616805505:2124] Handle TEvProposeTransaction 2025-09-25T16:20:04.056039Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062366616805505:2124] TxId# 281474976710658 ProcessProposeTransaction 2025-09-25T16:20:04.056053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062366616805505:2124] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7554062370911773389:2492] 2025-09-25T16:20:04.057008Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062370911773389:2492] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:04.057039Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062370911773389:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-09-25T16:20:04.057044Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062370911773389:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:04.057067Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062370911773389:2492] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:04.057208Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062370911773389:2492] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:04.057254Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062370911773389:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:04.057268Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062370911773389:2492] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-09-25T16:20:04.057310Z node 1 ... 
dyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92} 2025-09-25T16:20:42.881670Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062536388789581:2563] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:20:42.881679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062536388789581:2563] txid# 281474976715660 SEND to# [59:7554062536388789503:2325] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-09-25T16:20:42.884688Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062536388788694:2138] Handle TEvProposeTransaction 2025-09-25T16:20:42.884705Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062536388788694:2138] TxId# 281474976715661 ProcessProposeTransaction 2025-09-25T16:20:42.884728Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062536388788694:2138] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7554062536388789605:2575] 2025-09-25T16:20:42.885753Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062536388789605:2575] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56196" 2025-09-25T16:20:42.885781Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062536388789605:2575] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:42.885786Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062536388789605:2575] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:42.885802Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062536388789605:2575] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:42.885974Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062536388789605:2575] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:42.886020Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062536388789605:2575] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:42.886036Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062536388789605:2575] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 
72057594046644480} 2025-09-25T16:20:42.886089Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062536388789605:2575] txid# 281474976715661 HANDLE EvClientConnected 2025-09-25T16:20:42.889977Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062536388789605:2575] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-09-25T16:20:42.890013Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062536388789605:2575] txid# 281474976715661 SEND to# [59:7554062536388789604:2317] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-09-25T16:20:42.932846Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062536388788694:2138] Handle TEvProposeTransaction 2025-09-25T16:20:42.932870Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062536388788694:2138] TxId# 281474976715662 ProcessProposeTransaction 2025-09-25T16:20:42.932886Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062536388788694:2138] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7554062536388789628:2592] 2025-09-25T16:20:42.933905Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062536388789628:2592] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:54386" 2025-09-25T16:20:42.933922Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062536388789628:2592] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:42.933927Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062536388789628:2592] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:42.933944Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062536388789628:2592] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:42.934075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062536388789628:2592] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:42.934111Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062536388789628:2592] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:42.934124Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062536388789628:2592] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-09-25T16:20:42.934178Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062536388789628:2592] txid# 281474976715662 HANDLE EvClientConnected 2025-09-25T16:20:42.934300Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:42.941493Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062536388789628:2592] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-09-25T16:20:42.941524Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062536388789628:2592] txid# 281474976715662 SEND to# [59:7554062536388789627:2333] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-09-25T16:20:42.962574Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062536388788694:2138] Handle TEvProposeTransaction 2025-09-25T16:20:42.962600Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062536388788694:2138] TxId# 281474976715663 ProcessProposeTransaction 2025-09-25T16:20:42.962618Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062536388788694:2138] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7554062536388789670:2617] 2025-09-25T16:20:42.963562Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062536388789670:2617] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MiwiaWF0IjoxNzU4ODE3MjQyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.DA9Gy77xq56tmwKyPnzN72JGi7akeSNXIm7ry8N2vR1UKnq3-WQXJZ-fv2EWXH_k8shyDGqT8TfmTIjPgJVxEUj7oSUf3F7Q_FizDviPFIX6GgBwoI5MtBfvDaibEOfbjXjtvr_1jC8BHN_qHospQZ7Iq_7z10yQLsQS8Af-wRQP9771g7RJgNgytqAsZ1SLW94zH0XKvuKUUwItNtUl60ZjuUvT28s5USAPzXF8vvlnW6E7MQIsHuxVkbmTL9mzQaC-CzhCNfNeU3uvO_EG1dXIFxqpXudVntSYTlf3Jcp6Mtb9c5cyzynwYaHyKrEhrlazfUnZ24rakxkgCmCnzA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MiwiaWF0IjoxNzU4ODE3MjQyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:54386" 2025-09-25T16:20:42.963579Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062536388789670:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:42.963584Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062536388789670:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-09-25T16:20:42.963694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062536388789670:2617] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:42.963721Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062536388789670:2617] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:42.963735Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062536388789670:2617] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:42.963847Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062536388789670:2617] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:42.963853Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062536388789670:2617] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-09-25T16:20:42.963881Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062536388789670:2617] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-09-25T16:20:42.963885Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062536388789670:2617] txid# 281474976715663 SEND to# [59:7554062536388789669:2338] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:42.963975Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=ZjhmNDBlOGItY2JmY2EzNmYtNmJlYjNkOTktY2Q5NTg1NDc=, ActorId: [59:7554062536388789655:2338], ActorState: ExecuteState, TraceId: 01k60ttktd66bz7t61h1r7rhgz, Create QueryResponse for error on request, msg: 2025-09-25T16:20:42.964105Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062536388788694:2138] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:42.964109Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062536388788694:2138] TxId# 281474976715664 ProcessProposeKqpTransaction >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader >> TColumnShardTestReadWrite::WriteReadStandalone >> Backup::ProposeBackup >> TColumnShardTestReadWrite::WriteReadDuplicate >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:35.736924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:35.736967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.736974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:35.736980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:35.736987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:35.736992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:35.737002Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.737018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:35.737124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:35.737190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:35.750742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:35.750763Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:35.754428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:35.754513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:35.754547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:35.756459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:35.756532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:35.756663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.756746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:35.757415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.757465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:35.757803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.757816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.757843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:35.757852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:35.757862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:35.757903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.759452Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 
0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:35.778847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:35.778956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.779024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:35.779033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:35.779091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:35.779106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:35.779681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.779729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:35.779781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.779792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:35.779799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:35.779804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:35.780184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.780194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:35.780199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:35.780631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-09-25T16:20:35.780649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.780657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.780667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:35.781271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:35.781701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:35.781770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:35.782004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.782037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:35.782046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.782128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:35.782136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.782171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:35.782184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:35.782584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.782592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 17 ms, next wakeup# 593.983000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-09-25T16:20:42.963955Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2025-09-25T16:20:42.964352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-09-25T16:20:42.964584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:42.964595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:42.964649Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-09-25T16:20:42.964657Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:42.964661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:42.964670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:42.964676Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:42.964689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:42.964698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:43.436162Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436200Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436221Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436225Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436235Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436240Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.436251Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:885:2763], Recipient [2:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.436257Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.436274Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.436278Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.436287Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.436291Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.446566Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:43.446604Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:43.446612Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:43.446691Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-09-25T16:20:43.446698Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:43.446703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:43.446730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:43.446745Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:43.446767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:43.446781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:43.917886Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917918Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917934Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:885:2763]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917937Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917944Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917947Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:43.917955Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:885:2763], Recipient [2:885:2763]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.917958Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.917972Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.917974Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.917989Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.917992Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:43.928158Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:43.928200Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:43.928208Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:43.928299Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-09-25T16:20:43.928308Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:43.928314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:43.928343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:43.928349Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:43.928364Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.981000s, Timestamp# 1970-01-01T00:00:11.064000Z 2025-09-25T16:20:43.928375Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2025-09-25T16:20:43.929415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:43.929598Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [2:1520:3319], Recipient [2:295:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:43.929608Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:43.929615Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:43.929656Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [2:280:2270], Recipient [2:295:2279]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:43.929663Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:43.929681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 24826, msgbus: 1326 2025-09-25T16:20:03.551971Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062369601827948:2261];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:03.552003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:20:03.560841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/00258e/r3tmp/tmpeBpxzk/pdisk_1.dat 2025-09-25T16:20:03.631766Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:03.646706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:03.646746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24826, node 1 2025-09-25T16:20:03.650137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:03.658766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:03.658782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:03.658785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:03.658852Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:20:03.671515Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:7554062369601827964:2143] Handle TEvGetProxyServicesRequest TClient is connected to server localhost:1326 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:03.686759Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062369601827964:2143] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:03.686782Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062369601828412:2433] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:03.699176Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062369601828412:2433] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:03.700916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:03.709066Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062369601828412:2433] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } TClient::Ls response: 2025-09-25T16:20:03.711344Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062369601828412:2433] Handle TEvDescribeSchemeResult Forward to# [1:7554062369601828411:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:03.715220Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062369601827964:2143] Handle TEvProposeTransaction 2025-09-25T16:20:03.715236Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062369601827964:2143] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:03.715272Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062369601827964:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062369601828448:2444] 2025-09-25T16:20:03.727544Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062369601828448:2444] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:03.727606Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062369601828448:2444] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:03.727612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062369601828448:2444] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:03.727633Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062369601828448:2444] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:03.727792Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062369601828448:2444] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:03.727827Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062369601828448:2444] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:03.727841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062369601828448:2444] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:03.727917Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062369601828448:2444] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:03.728188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:03.729153Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062369601828448:2444] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:03.729171Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062369601828448:2444] txid# 281474976715657 SEND to# [1:7554062369601828447:2443] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-09-25T16:20:03.743172Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062369601827964:2143] Handle TEvProposeTransaction 2025-09-25T16:20:03.743188Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062369601827964:2143] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:03.743203Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062369601827964:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062369601828488:2480] 2025-09-25T16:20:03.743884Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062369601828488:2480] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:03.743902Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062369601828488:2480] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-09-25T16:20:03.743906Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062369601828488:2480] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:03.743921Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062369601828488:2480] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:03.744046Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062369601828488:2480] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:03.744080Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062369601828488:2480] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:03.744102Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062369601828488:2480] txid# 281474976715658 SEND to# 7205 ... 
or: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:43.890179Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062538610368032:2568] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-09-25T16:20:43.890234Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062538610368032:2568] txid# 281474976710661 HANDLE EvClientConnected 2025-09-25T16:20:43.893821Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062538610368032:2568] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-09-25T16:20:43.893842Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062538610368032:2568] txid# 281474976710661 SEND to# [59:7554062538610368031:2317] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-09-25T16:20:43.927358Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538610367241:2095] Handle TEvProposeTransaction 2025-09-25T16:20:43.927377Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538610367241:2095] TxId# 281474976710662 ProcessProposeTransaction 2025-09-25T16:20:43.927410Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538610367241:2095] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7554062538610368055:2585] 2025-09-25T16:20:43.928459Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062538610368055:2585] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:43814" 2025-09-25T16:20:43.928485Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062538610368055:2585] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:43.928491Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062538610368055:2585] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:43.928509Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062538610368055:2585] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:43.928654Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062538610368055:2585] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:43.928690Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062538610368055:2585] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:43.928702Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062538610368055:2585] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-09-25T16:20:43.928772Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1463: Actor# [59:7554062538610368055:2585] txid# 281474976710662 HANDLE EvClientConnected 2025-09-25T16:20:43.929033Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:43.930129Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062538610368055:2585] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-09-25T16:20:43.930148Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062538610368055:2585] txid# 281474976710662 SEND to# [59:7554062538610368054:2333] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-09-25T16:20:43.942095Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538610367241:2095] Handle TEvProposeTransaction 2025-09-25T16:20:43.942116Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538610367241:2095] TxId# 281474976710663 ProcessProposeTransaction 2025-09-25T16:20:43.942141Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538610367241:2095] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7554062538610368092:2608] 2025-09-25T16:20:43.943124Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062538610368092:2608] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:55228" 2025-09-25T16:20:43.943150Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062538610368092:2608] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:43.943156Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062538610368092:2608] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:43.943171Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062538610368092:2608] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:43.943333Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062538610368092:2608] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:43.943377Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062538610368092:2608] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:43.943397Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062538610368092:2608] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-09-25T16:20:43.943455Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1463: Actor# [59:7554062538610368092:2608] txid# 281474976710663 HANDLE EvClientConnected 2025-09-25T16:20:43.946955Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062538610368092:2608] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-09-25T16:20:43.946978Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062538610368092:2608] txid# 281474976710663 SEND to# [59:7554062538610368091:2335] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-09-25T16:20:43.958090Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538610367241:2095] Handle TEvProposeTransaction 2025-09-25T16:20:43.958117Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538610367241:2095] TxId# 281474976710664 ProcessProposeTransaction 2025-09-25T16:20:43.958138Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538610367241:2095] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7554062538610368122:2623] 2025-09-25T16:20:43.959106Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062538610368122:2623] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MywiaWF0IjoxNzU4ODE3MjQzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.UZ72KEgCJMFO130lMiMRnclLUA_9EBSGLJsDnS5UtcwoOFj68d7qq07HZcKb0pZlKK1q-Ovjjs7fhnkQSj48a9Zy8ThzqN9wxWaHIpvcaVAIbOL6YhhVZL6eUqUDvrzmueSZ50ngRWFc9r_hh9TOz6qABvcWeNXQQdcip3-jauytP1zZ6lAxxmkOyD_bb931dRUWBlj1JmbQnPkjFkKwt6UTFvbbZOb2fpdiTiTl3MjoQiAYH6cYpCSrFPWC7acJkK3DhXZB-GCOQrTLUMsMcoT9A4xFUuTOTkUcCKCxV9Jf8vaBXmWM5n636C3HqPmUM-Ni1my_g9wwJ-UM2IsuDA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0MywiaWF0IjoxNzU4ODE3MjQzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:55228" 2025-09-25T16:20:43.959143Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062538610368122:2623] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:43.959149Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062538610368122:2623] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-09-25T16:20:43.959233Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062538610368122:2623] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:43.959254Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062538610368122:2623] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:43.959269Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062538610368122:2623] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:43.959370Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062538610368122:2623] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:43.959391Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: 
Actor# [59:7554062538610368122:2623] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-09-25T16:20:43.959419Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062538610368122:2623] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-09-25T16:20:43.959425Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062538610368122:2623] txid# 281474976710664 SEND to# [59:7554062538610368121:2346] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:43.959556Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=NDE4ZjZiMGItZDg1MTIyNjAtNDA4NjljZDQtMjliN2I1NTU=, ActorId: [59:7554062538610368112:2346], ActorState: ExecuteState, TraceId: 01k60ttmsh38f5e2k72zs8qtrp, Create QueryResponse for error on request, msg: 2025-09-25T16:20:43.959690Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062538610367241:2095] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:43.959708Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062538610367241:2095] TxId# 281474976710665 ProcessProposeKqpTransaction 2025-09-25T16:20:44.376895Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-09-25T16:20:43.247427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.250976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.251012Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.251822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.251882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.251910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.251930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.251950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.251972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.251993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.252013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.252029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.252042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.252054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.252066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.252096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.257467Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.257520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.257527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.257559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.257586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.257595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.257601Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.257613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.257623Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.257632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.257637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.257664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.257670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.257676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.257679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.257687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.257692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.257697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.257700Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.257707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.257713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.257716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.257723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.257729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.257732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.257750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.257756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.257759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.257768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.257774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.257777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.257782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.257787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.257790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.257796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.257801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.257805Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.257814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.257819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1}
,"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc
":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"tx
s":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 13463, msgbus: 11951 2025-09-25T16:20:05.104967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062378518175896:2086];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:05.105539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002522/r3tmp/tmpWyby1T/pdisk_1.dat 2025-09-25T16:20:05.188883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:05.217392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:05.217424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:20:05.238701Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:05.258811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13463, node 1 2025-09-25T16:20:05.287409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:05.287426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:05.287429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:05.287490Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11951 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:05.324947Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062378518176078:2132] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:05.324975Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062378518176567:2443] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:05.325130Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062378518176567:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 TClient::Ls response: 2025-09-25T16:20:05.339187Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062378518176567:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:05.341903Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062378518176567:2443] Handle TEvDescribeSchemeResult Forward to# [1:7554062378518176566:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 
18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-09-25T16:20:05.346746Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062378518176078:2132] Handle TEvProposeTransaction 2025-09-25T16:20:05.346761Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062378518176078:2132] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:05.346802Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062378518176078:2132] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062378518176573:2448] 2025-09-25T16:20:05.381480Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062378518176573:2448] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:05.381534Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062378518176573:2448] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:05.381540Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062378518176573:2448] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:05.381558Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062378518176573:2448] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:05.381724Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062378518176573:2448] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:05.381760Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062378518176573:2448] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:05.381776Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062378518176573:2448] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:05.381834Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062378518176573:2448] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:05.382178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-09-25T16:20:05.385842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062378518176573:2448] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:05.385865Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062378518176573:2448] txid# 281474976715657 SEND to# [1:7554062378518176572:2447] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-09-25T16:20:05.407843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-09-25T16:20:05.441037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062378518176078:2132] Handle TEvProposeTransaction 2025-09-25T16:20:05.441052Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062378518176078:2132] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:05.441065Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062378518176078:2132] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062378518176636:2489] 2025-09-25T16:20:05.441925Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062378518176636:2489] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:05.441943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062378518176636:2489] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:05.441946Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062378518176636:2489] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:05.441962Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062378518176636:2489] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:05.442061Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062378518176636:2489] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:05.442096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062378518176636:2489] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:05.442108Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062378518176636:2489] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-09-25T ... 
480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:44.347194Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062543069696797:2568] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-09-25T16:20:44.347234Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062543069696797:2568] txid# 281474976710661 HANDLE EvClientConnected 2025-09-25T16:20:44.350027Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062543069696797:2568] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-09-25T16:20:44.350045Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062543069696797:2568] txid# 281474976710661 SEND to# [59:7554062543069696796:2318] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-09-25T16:20:44.517811Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538774728740:2144] Handle TEvProposeTransaction 2025-09-25T16:20:44.517826Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538774728740:2144] TxId# 281474976710662 ProcessProposeTransaction 2025-09-25T16:20:44.517842Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538774728740:2144] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7554062543069696821:2586] 2025-09-25T16:20:44.518602Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062543069696821:2586] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57312" 2025-09-25T16:20:44.518622Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062543069696821:2586] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:44.518625Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062543069696821:2586] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:44.518640Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062543069696821:2586] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:44.518734Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062543069696821:2586] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:44.518770Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062543069696821:2586] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:44.518784Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062543069696821:2586] txid# 281474976710662 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-09-25T16:20:44.518830Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062543069696821:2586] txid# 281474976710662 HANDLE EvClientConnected 2025-09-25T16:20:44.518959Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:44.519527Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062543069696821:2586] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-09-25T16:20:44.519541Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062543069696821:2586] txid# 281474976710662 SEND to# [59:7554062543069696820:2332] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-09-25T16:20:44.528817Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538774728740:2144] Handle TEvProposeTransaction 2025-09-25T16:20:44.528852Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538774728740:2144] TxId# 281474976710663 ProcessProposeTransaction 2025-09-25T16:20:44.528868Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538774728740:2144] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7554062543069696856:2607] 2025-09-25T16:20:44.529655Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062543069696856:2607] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:50962" 2025-09-25T16:20:44.529679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062543069696856:2607] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:44.529683Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062543069696856:2607] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:44.529694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062543069696856:2607] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:44.529792Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [59:7554062543069696856:2607] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:44.529820Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [59:7554062543069696856:2607] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-09-25T16:20:44.529835Z node 59 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [59:7554062543069696856:2607] txid# 281474976710663 SEND 
to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-09-25T16:20:44.529878Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [59:7554062543069696856:2607] txid# 281474976710663 HANDLE EvClientConnected 2025-09-25T16:20:44.533152Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062543069696856:2607] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-09-25T16:20:44.533171Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062543069696856:2607] txid# 281474976710663 SEND to# [59:7554062543069696855:2334] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-09-25T16:20:44.544846Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7554062538774728740:2144] Handle TEvProposeTransaction 2025-09-25T16:20:44.544861Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7554062538774728740:2144] TxId# 281474976710664 ProcessProposeTransaction 2025-09-25T16:20:44.544879Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7554062538774728740:2144] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7554062543069696883:2619] 2025-09-25T16:20:44.545684Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [59:7554062543069696883:2619] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0NCwiaWF0IjoxNzU4ODE3MjQ0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.JATHPwNSYI1cAgMHzqPTk9gcb9eYps5g4dDxx3Dec33SVTPbUPTGh2e0kzraMDem2YZrvLCXUp2Q5uebNFZXDRuDOom00-DIJ6S0i0LX0PlyH90p5y4q8Dzwi635qwR-29_lSYrIkRmI7ArDMZuXxmG5ASZLpxZ0jzIZq_apk_bYYkg5BBSLnjZg1NqwGzR9zFPRpxPVT7IFneaFfEzCiYiNjVuCpBfhxNVoscDFZilmsUlTtHJiHVQnw762nfIHQjjHeKddqi823kqFv9mlzp1glOd-gxZDiTakozaa3s4Gxf9zfz6UP44OHoc1Y9F4IddLFGI2c7pBr-WOQ82vNg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1ODg2MDQ0NCwiaWF0IjoxNzU4ODE3MjQ0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:50962" 2025-09-25T16:20:44.545710Z node 59 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [59:7554062543069696883:2619] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-09-25T16:20:44.545714Z node 59 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [59:7554062543069696883:2619] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-09-25T16:20:44.545782Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1512: Actor# [59:7554062543069696883:2619] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-09-25T16:20:44.545798Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1547: Actor# [59:7554062543069696883:2619] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-09-25T16:20:44.545812Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [59:7554062543069696883:2619] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:44.545920Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# 
[59:7554062543069696883:2619] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:44.545925Z node 59 :TX_PROXY ERROR: schemereq.cpp:1184: Actor# [59:7554062543069696883:2619] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-09-25T16:20:44.545954Z node 59 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [59:7554062543069696883:2619] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-09-25T16:20:44.545958Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062543069696883:2619] txid# 281474976710664 SEND to# [59:7554062543069696882:2345] Source {TEvProposeTransactionStatus Status# 5} 2025-09-25T16:20:44.546158Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:2830: SessionId: ydb://session/3?node_id=59&id=MmViNTkyZGUtODhlYjY1ODEtZDdkOGQyYjQtNmRiNjI5M2I=, ActorId: [59:7554062543069696873:2345], ActorState: ExecuteState, TraceId: 01k60ttnbx34wjtzn5sr97ysyg, Create QueryResponse for error on request, msg: 2025-09-25T16:20:44.546275Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7554062538774728740:2144] Handle TEvExecuteKqpTransaction 2025-09-25T16:20:44.546279Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7554062538774728740:2144] TxId# 281474976710665 ProcessProposeKqpTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-09-25T16:20:43.935604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.940443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.940500Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.941359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.941419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.941465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.941491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.941511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.941531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.941551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.941572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.941592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.941612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.941632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.941652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.941699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.949635Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.949728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.949738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.949786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.949827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.949841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.949847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.949858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.949868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.949876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.949880Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.949900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.949908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.949917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.949921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.949933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.949940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.949948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.949952Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.949962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.949970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.949974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.949985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.949993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.949997Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.950027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.950035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.950040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.950054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.950062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.950067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.950075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.950083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.950088Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.950096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.950104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.950109Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.950123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.950131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1}
,"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc
":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"tx
s":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; >> EvWrite::AbortInTransaction [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-09-25T16:20:44.296986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.302482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.302527Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.303384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.303439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.303477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.303503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.303523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.303544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.303564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.303585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.303606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.303641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.303662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.303682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.303727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.310449Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.310510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.310519Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.310559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.310599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.310612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.310618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.310630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.310640Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.310648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.310653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.310674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.310683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.310691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.310696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.310708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.310716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.310725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.310729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.310739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.310747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.310752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.310762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.310771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.310776Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.310803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.310812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.310817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.310831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.310840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.310845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.310853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.310860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.310865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.310873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.310881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.310887Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.310902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:44.310910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
age=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:45.823847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:45.823880Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:45.823891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.823904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.823913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.823935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:45.823944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.823952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.823958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:414:2425] finished for tablet 9437184 2025-09-25T16:20:45.824016Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:413:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1650726,"name":"_full_task","f":1650726,"d_finished":0,"c":0,"l":1652253,"d":1527},"events":[{"name":"bootstrap","f":1650764,"d_finished":203,"c":1,"l":1650967,"d":203},{"a":1652222,"name":"ack","f":1652088,"d_finished":116,"c":1,"l":1652204,"d":147},{"a":1652221,"name":"processing","f":1651001,"d_finished":424,"c":3,"l":1652204,"d":456},{"name":"ProduceResults","f":1650904,"d_finished":215,"c":6,"l":1652244,"d":215},{"a":1652245,"name":"Finish","f":1652245,"d_finished":0,"c":0,"l":1652253,"d":8},{"name":"task_result","f":1651005,"d_finished":298,"c":2,"l":1652056,"d":298}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.824025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:45.824054Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:413:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1650726,"name":"_full_task","f":1650726,"d_finished":0,"c":0,"l":1652318,"d":1592},"events":[{"name":"bootstrap","f":1650764,"d_finished":203,"c":1,"l":1650967,"d":203},{"a":1652222,"name":"ack","f":1652088,"d_finished":116,"c":1,"l":1652204,"d":212},{"a":1652221,"name":"processing","f":1651001,"d_finished":424,"c":3,"l":1652204,"d":521},{"name":"ProduceResults","f":1650904,"d_finished":215,"c":6,"l":1652244,"d":215},{"a":1652245,"name":"Finish","f":1652245,"d_finished":0,"c":0,"l":1652318,"d":73},{"name":"task_result","f":1651005,"d_finished":298,"c":2,"l":1652056,"d":298}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.824064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:45.822311Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-09-25T16:20:45.824068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:45.824089Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-09-25T16:20:39.483082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:39.486668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:39.486713Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:39.487380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-09-25T16:20:39.487417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:39.487444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:39.487463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:39.487484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:39.487507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:39.487526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:39.487545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:39.487565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:39.487583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:39.487612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.487632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:39.487669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:39.487683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:39.492363Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:39.492418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-09-25T16:20:39.492429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:39.492531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:39.492548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-09-25T16:20:39.492556Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-09-25T16:20:39.492579Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.492619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:39.492630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:39.492635Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-09-25T16:20:39.492648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:39.492658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:39.492666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:39.492672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:39.492703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.492712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:39.492721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:39.492726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:39.492740Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:39.492749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:39.492760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:39.492765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:39.492776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:39.492784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:39.492790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:39.492800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:39.492810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:39.492816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:39.492859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:39.492869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:39.492874Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:39.492893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:39.492902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.492908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.492917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:39.492926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:39.492931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=n ... ,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:45.260636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-09-25T16:20:45.260641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:45.260644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:45.260702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:45.260712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:45.260722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-09-25T16:20:45.260727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-09-25T16:20:45.260744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:421:2422];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-09-25T16:20:45.260760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260806Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:45.260819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:423:2423] finished for tablet 9437184 2025-09-25T16:20:45.260918Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:421:2422];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.588}],"full":{"a":3291927,"name":"_full_task","f":3291927,"d_finished":0,"c":0,"l":5880853,"d":2588926},"events":[{"name":"bootstrap","f":3291976,"d_finished":231,"c":1,"l":3292207,"d":231},{"a":5880794,"name":"ack","f":3301486,"d_finished":1362533,"c":421,"l":5880781,"d":1362592},{"a":5880792,"name":"processing","f":3292264,"d_finished":2566202,"c":843,"l":5880782,"d":2566263},{"name":"ProduceResults","f":3292127,"d_finished":2136834,"c":1266,"l":5880840,"d":2136834},{"a":5880841,"name":"Finish","f":5880841,"d_finished":0,"c":0,"l":5880853,"d":12},{"name":"task_result","f":3292269,"d_finished":1201770,"c":422,"l":5880636,"d":1201770}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:421:2422];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:45.260955Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:421:2422];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.588}],"full":{"a":3291927,"name":"_full_task","f":3291927,"d_finished":0,"c":0,"l":5880920,"d":2588993},"events":[{"name":"bootstrap","f":3291976,"d_finished":231,"c":1,"l":3292207,"d":231},{"a":5880794,"name":"ack","f":3301486,"d_finished":1362533,"c":421,"l":5880781,"d":1362659},{"a":5880792,"name":"processing","f":3292264,"d_finished":2566202,"c":843,"l":5880782,"d":2566330},{"name":"ProduceResults","f":3292127,"d_finished":2136834,"c":1266,"l":5880840,"d":2136834},{"a":5880841,"name":"Finish","f":5880841,"d_finished":0,"c":0,"l":5880920,"d":79},{"name":"task_result","f":3292269,"d_finished":1201770,"c":422,"l":5880636,"d":1201770}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:45.260964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:42.671801Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-09-25T16:20:45.392490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:45.392550Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:423:2423];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-09-25T16:20:44.298406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.302828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.302867Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.303569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.303606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.303650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.303664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.303677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.303692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.303705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.303717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.303729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.303742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.303755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.303767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.303796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.308556Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-09-25T16:20:44.308651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.308661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.308716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.308760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.308776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.308786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.308797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.308811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.308837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.308847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.308875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.308889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.308900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.308906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.308921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.308931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.308944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.308952Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.308966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.308978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.308985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.309004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.309016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.309023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.309060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.309074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.309081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.309104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.309118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.309126Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.309139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.309147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.309152Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.309161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.309173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.309181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.309204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:44.309216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-09-25T16:20:46.144349Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-09-25T16:20:46.144384Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1758817246577 at tablet 9437184, mediator 0 2025-09-25T16:20:46.144393Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2025-09-25T16:20:46.144402Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1758817246577 last planned step 1758817246577 at tablet 9437184 2025-09-25T16:20:46.144414Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2025-09-25T16:20:46.144475Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1758817246577:max} readable: {1758817246577:max} at tablet 9437184 2025-09-25T16:20:46.144493Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-09-25T16:20:46.144562Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758817246577:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-09-25T16:20:46.144574Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758817246577:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-09-25T16:20:46.144728Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758817246577:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-09-25T16:20:46.145081Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758817246577:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:133;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-09-25T16:20:46.145218Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:113:2143];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758817246577:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:181;event=TTxScan started;actor_id=[2:180:2192];trace_detailed=; 2025-09-25T16:20:46.145338Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:82;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-09-25T16:20:46.145363Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:97;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-09-25T16:20:46.145417Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145434Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145471Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:46.145485Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145497Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145504Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:180:2192] finished for tablet 9437184 2025-09-25T16:20:46.145564Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:179:2191];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1955574,"name":"_full_task","f":1955574,"d_finished":0,"c":0,"l":1955884,"d":310},"events":[{"name":"bootstrap","f":1955614,"d_finished":198,"c":1,"l":1955812,"d":198},{"a":1955837,"name":"ack","f":1955837,"d_finished":0,"c":0,"l":1955884,"d":47},{"a":1955832,"name":"processing","f":1955832,"d_finished":0,"c":0,"l":1955884,"d":52},{"name":"ProduceResults","f":1955766,"d_finished":69,"c":2,"l":1955871,"d":69},{"a":1955871,"name":"Finish","f":1955871,"d_finished":0,"c":0,"l":1955884,"d":13}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145576Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:179:2191];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:46.145614Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:179:2191];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1955574,"name":"_full_task","f":1955574,"d_finished":0,"c":0,"l":1955954,"d":380},"events":[{"name":"bootstrap","f":1955614,"d_finished":198,"c":1,"l":1955812,"d":198},{"a":1955837,"name":"ack","f":1955837,"d_finished":0,"c":0,"l":1955954,"d":117},{"a":1955832,"name":"processing","f":1955832,"d_finished":0,"c":0,"l":1955954,"d":122},{"name":"ProduceResults","f":1955766,"d_finished":69,"c":2,"l":1955871,"d":69},{"a":1955871,"name":"Finish","f":1955871,"d_finished":0,"c":0,"l":1955954,"d":83}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.145630Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:46.145073Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-09-25T16:20:46.145635Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:46.145649Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:180:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TColumnShardTestReadWrite::ReadStale >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKString ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-09-25T16:20:44.554548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.557963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.558019Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.558796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.558844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.558881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.558896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.558908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.558923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.558937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.558950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.558962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.558975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.558988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.559001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.559047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.564745Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.564814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.564838Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.564882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.564924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.564937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.564944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.564956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.564968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.564977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.564982Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.565017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.565027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.565036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.565041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.565055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.565063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.565072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.565077Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.565087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.565096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.565102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.565113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.565122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.565127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.565160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.565169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.565174Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.565204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.565213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.565219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.565228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.565236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.565241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.565250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.565259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.565266Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.565283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:44.565294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:46.938776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:46.938823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:46.938843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.938865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.938879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.938913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:46.938927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.938940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.938948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:984:2851] finished for tablet 9437184 2025-09-25T16:20:46.939018Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:983:2850];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":2479065,"name":"_full_task","f":2479065,"d_finished":0,"c":0,"l":2480947,"d":1882},"events":[{"name":"bootstrap","f":2479116,"d_finished":226,"c":1,"l":2479342,"d":226},{"a":2480901,"name":"ack","f":2480694,"d_finished":179,"c":1,"l":2480873,"d":225},{"a":2480899,"name":"processing","f":2479375,"d_finished":520,"c":3,"l":2480874,"d":568},{"name":"ProduceResults","f":2479272,"d_finished":300,"c":6,"l":2480935,"d":300},{"a":2480936,"name":"Finish","f":2480936,"d_finished":0,"c":0,"l":2480947,"d":11},{"name":"task_result","f":2479379,"d_finished":329,"c":2,"l":2480647,"d":329}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.939028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:46.939073Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:983:2850];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":2479065,"name":"_full_task","f":2479065,"d_finished":0,"c":0,"l":2481025,"d":1960},"events":[{"name":"bootstrap","f":2479116,"d_finished":226,"c":1,"l":2479342,"d":226},{"a":2480901,"name":"ack","f":2480694,"d_finished":179,"c":1,"l":2480873,"d":303},{"a":2480899,"name":"processing","f":2479375,"d_finished":520,"c":3,"l":2480874,"d":646},{"name":"ProduceResults","f":2479272,"d_finished":300,"c":6,"l":2480935,"d":300},{"a":2480936,"name":"Finish","f":2480936,"d_finished":0,"c":0,"l":2481025,"d":89},{"name":"task_result","f":2479379,"d_finished":329,"c":2,"l":2480647,"d":329}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:46.939090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:46.936937Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-09-25T16:20:46.939096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:46.939129Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> Normalizers::RemoveDeleteFlagNormalizer >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-09-25T16:20:46.374459Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:46.380310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:46.380369Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:46.381300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:46.381359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:46.381408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:46.381437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:46.381460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:46.381484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:46.381506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:46.381529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:46.381551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:46.381573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.381596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:46.381618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:46.381663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:46.388767Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:46.388863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:46.388875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:46.388921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:46.388967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:46.388981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:46.388989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:46.389003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:46.389012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:46.389021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:46.389028Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:46.389051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:46.389059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:46.389068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:46.389073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:46.389086Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:46.389094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:46.389102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:46.389107Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:46.389117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:46.389125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:46.389130Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:46.389142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:46.389151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:46.389156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:46.389186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:46.389195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:46.389200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:46.389216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:46.389225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.389230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.389238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:46.389247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:46.389252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:46.389260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:46.389269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:46.389276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:46.389292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:46.389300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:47.672526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:47.672567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-09-25T16:20:47.672586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:47.672668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:414:2425] finished for tablet 9437184 2025-09-25T16:20:47.672758Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:413:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1421875,"name":"_full_task","f":1421875,"d_finished":0,"c":0,"l":1423604,"d":1729},"events":[{"name":"bootstrap","f":1421916,"d_finished":209,"c":1,"l":1422125,"d":209},{"a":1423553,"name":"ack","f":1423363,"d_finished":165,"c":1,"l":1423528,"d":216},{"a":1423551,"name":"processing","f":1422156,"d_finished":469,"c":3,"l":1423528,"d":522},{"name":"ProduceResults","f":1422047,"d_finished":276,"c":6,"l":1423593,"d":276},{"a":1423593,"name":"Finish","f":1423593,"d_finished":0,"c":0,"l":1423604,"d":11},{"name":"task_result","f":1422158,"d_finished":296,"c":2,"l":1423325,"d":296}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:47.672814Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:413:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1421875,"name":"_full_task","f":1421875,"d_finished":0,"c":0,"l":1423679,"d":1804},"events":[{"name":"bootstrap","f":1421916,"d_finished":209,"c":1,"l":1422125,"d":209},{"a":1423553,"name":"ack","f":1423363,"d_finished":165,"c":1,"l":1423528,"d":291},{"a":1423551,"name":"processing","f":1422156,"d_finished":469,"c":3,"l":1423528,"d":597},{"name":"ProduceResults","f":1422047,"d_finished":276,"c":6,"l":1423593,"d":276},{"a":1423593,"name":"Finish","f":1423593,"d_finished":0,"c":0,"l":1423679,"d":86},{"name":"task_result","f":1422158,"d_finished":296,"c":2,"l":1423325,"d":296}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.672847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:47.670859Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-09-25T16:20:47.672853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:47.672884Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] 
recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:20:35.102662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:20:35.102687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.102693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:20:35.102698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:20:35.102704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:20:35.102708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:20:35.102717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:20:35.102730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:20:35.102840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:20:35.102913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:20:35.119730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:20:35.119756Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:20:35.123328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:20:35.123415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:20:35.123450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:20:35.125004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:20:35.125072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:20:35.125196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.125267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:20:35.125798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-09-25T16:20:35.125856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:20:35.126185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.126197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:20:35.126219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:20:35.126227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:35.126233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:20:35.126271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.127670Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:20:35.145456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:20:35.145550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.145604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:20:35.145610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:20:35.145663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:20:35.145676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:35.146566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.146608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:20:35.146666Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.146674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:20:35.146679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:20:35.146683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:20:35.147179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.147192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:20:35.147198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:20:35.147751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.147763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:20:35.147769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.147778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:20:35.148419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:20:35.148869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:20:35.148921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:20:35.149167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:35.149191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:35.149199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.149278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:20:35.149286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:20:35.149321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:20:35.149332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:20:35.150154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:20:35.150191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2025-09-25T16:20:46.639915Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 82 ms, next wakeup# 593.918000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-09-25T16:20:46.639930Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2025-09-25T16:20:46.640326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-09-25T16:20:46.640337Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:46.640390Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-09-25T16:20:46.640396Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:46.640400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:46.640408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:46.640414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-09-25T16:20:46.640427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:46.640435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:47.072208Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received 
event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072246Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072267Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072271Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072281Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072286Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.072297Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.072303Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.072323Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:953:2817], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.072327Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.072337Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.072341Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.102938Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:47.102974Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:47.102982Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:47.103094Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-09-25T16:20:47.103101Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:47.103107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:47.103133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:47.103148Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-09-25T16:20:47.103170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-09-25T16:20:47.103176Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-09-25T16:20:47.474006Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474040Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474061Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474065Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474075Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474080Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5233: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-09-25T16:20:47.474092Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:466:2417], Recipient [2:466:2417]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.474097Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.474115Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:953:2817], Recipient [2:953:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.474119Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.474130Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271124999, Sender [2:295:2279], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.474134Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5232: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-09-25T16:20:47.504767Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:295:2279]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:47.504812Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5447: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-09-25T16:20:47.504836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: 
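The shred progress entries above and just below carry TEvControllerShredResponse values of Progress10k# 0, 5000 and finally 10000 with Completed: true, which the schemeshard prints as 0%, 50% and "Data shred in BSC is completed". Progress10k therefore appears to be hundredths of a percent; a minimal sketch of that conversion, inferred from the logged pairs only (helper names are hypothetical, not the YDB implementation):

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical helper illustrating the conversion behind the progress lines:
// Progress10k looks like hundredths of a percent in the range 0..10000.
struct TShredProgress {
    uint32_t Progress10k;
    bool Completed;
};

static int ToPercent(const TShredProgress& p) {
    return static_cast<int>(p.Progress10k / 100);  // 0 -> 0, 5000 -> 50, 10000 -> 100
}

int main() {
    const TShredProgress responses[] = {
        {0, false},      // logged as "Progress data shred in BSC 0%"
        {5000, false},   // logged as "Progress data shred in BSC 50%"
        {10000, true},   // logged as "Data shred in BSC is completed"
    };

    assert(ToPercent(responses[0]) == 0);
    assert(ToPercent(responses[1]) == 50);
    assert(responses[2].Completed && ToPercent(responses[2]) == 100);
    return 0;
}
```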
[RootShredManager] SendRequestToBSC: Generation# 3 2025-09-25T16:20:47.504968Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 268637738, Sender [2:303:2285], Recipient [2:295:2279]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-09-25T16:20:47.504976Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-09-25T16:20:47.504982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8237: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-09-25T16:20:47.505009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-09-25T16:20:47.505015Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-09-25T16:20:47.505025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.917000s, Timestamp# 1970-01-01T00:00:11.128000Z 2025-09-25T16:20:47.505035Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2025-09-25T16:20:47.506007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-09-25T16:20:47.506176Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [2:4083:5370], Recipient [2:295:2279]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:47.506185Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:20:47.506190Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:20:47.506222Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271125519, Sender [2:280:2270], Recipient [2:295:2279]: NKikimrScheme.TEvShredInfoRequest 2025-09-25T16:20:47.506229Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5444: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-09-25T16:20:47.506234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8188: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-09-25T16:20:46.921811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:46.926947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:46.927009Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:46.927853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:46.927923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:46.927967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:46.927990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:46.928009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:46.928034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:46.928055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:46.928077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:46.928097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:46.928115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.928135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:46.928154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:46.928201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:46.934520Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:46.934605Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:46.934615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:46.934661Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:46.934700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:46.934737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:46.934745Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:46.934759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:46.934770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:46.934780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:46.934786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:46.934809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:46.934820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:46.934829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:46.934834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:46.934847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:46.934856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:46.934865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:46.934871Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:46.934882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:46.934891Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:46.934897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:46.934908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:46.934917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:46.934923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:46.934955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:46.934965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:46.934971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:46.934990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:46.934999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.935004Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:46.935014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:46.935023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:46.935029Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:46.935038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:46.935048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:46.935054Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:46.935071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:46.935081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... gger.h:56: TxPlanStep[6] complete at tablet 9437184 2025-09-25T16:20:47.532253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:98;progress_tx_id=100;lock_id=1;broken=0; 2025-09-25T16:20:47.532324Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:177;event=remove_by_insert_id;id=2;operation_id=1; 2025-09-25T16:20:47.532331Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:180;event=remove_operation;operation_id=1; 2025-09-25T16:20:47.532422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:246;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:20:47.532428Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:47.532451Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:47.532456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=0; 2025-09-25T16:20:47.534875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:47.534905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:47.534914Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:47.534951Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:47.535097Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 
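The TTxUpdateSchema trace above registers an ordered chain of 12 normalizers (Granules, Chunks, TablesCleaner, ..., RestoreAppearanceSnapshot) and runs them in sequence: each one is initialized, reports "0 chunks found", is marked finished, and the shard switches to the next. A minimal sketch of that sequential pattern, using an invented stub interface rather than YDB's actual normalizer classes:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Invented stub standing in for a normalizer; in this sketch every run finds
// nothing to repair, matching the "0 chunks found" messages in the log.
struct TNormalizerStub {
    std::string Name;
    size_t Run() const { return 0; }  // number of chunks that needed repair
};

int main() {
    // Order taken from the "normalizer_register" lines above (12 entries).
    std::vector<TNormalizerStub> normalizers = {
        {"Granules"}, {"Chunks"}, {"TablesCleaner"}, {"CleanGranuleId"},
        {"GCCountersNormalizer"}, {"SyncPortionFromChunks"}, {"RestoreV1Chunks_V2"},
        {"RestoreV2Chunks"}, {"CleanDeprecatedSnapshot"}, {"RestoreV0ChunksMeta"},
        {"CopyBlobIdsToV2"}, {"RestoreAppearanceSnapshot"},
    };

    for (const auto& n : normalizers) {
        size_t chunks = n.Run();
        std::cout << "normalizer=" << n.Name << ";message=" << chunks
                  << " chunks found; finished, switching to next\n";
    }
    return 0;
}
```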
1 version: {1758816887926:max} readable: {1758817247926:max} at tablet 9437184 2025-09-25T16:20:47.546028Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-09-25T16:20:47.546303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:17;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-09-25T16:20:47.546630Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2025-09-25T16:20:47.546652Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2025-09-25T16:20:47.546930Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6},{"from":8},{"from":10},{"from":12},{"from":14},{"from":16},{"from":18},{"from":20},{"from":22},{"from":24},{"from":26},{"from":28}]},{"owner_id":18,"inputs":[{"from":29}]},{"owner_id":2,"inputs":[{"from":29}]},{"owner_id":20,"inputs":[{"from":29}]},{"owner_id":4,"inputs":[{"from":29}]},{"owner_id":22,"inputs":[{"from":29}]},{"owner_id":6,"inputs":[{"from":29}]},{"owner_id":24,"inputs":[{"from":29}]},{"owner_id":8,"inputs":[{"from":29}]},{"owner_id":26,"inputs":[{"from":29}]},{"owner_id":10,"inputs":[{"from":29}]},{"owner_id":28,"inputs":[{"from":29}]},{"owner_id":29,"inputs":[{"from":30}]},{"owner_id":12,"inputs":[{"from":29}]},{"owner_id":30,"inputs":[]},{"owner_id":14,"inputs":[{"from":29}]},{"owner_id":16,"inputs":[{"from":29}]}],"nodes":{"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":33,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":33,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":33,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043","t":"Projection"},"w":462,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":33,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":33,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":33,"id":16},"24":{"p":{"i":"4294967041","p":{"address":{"name":"_yql_tx_id","id":4294967041}},"o":"4294967041","t":"AssembleOriginalData"},"w":33,"id":24},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":33,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":33,"id":10},"29":{"p":{"i":"0","p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"4294967040,4294967041,1,4294967042,2,4294967043,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":28,"id":29},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":33,"id":6},"30":{"p":{"p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":30},"22":{"p":{"i":"4294967040","p":{"address":{"name":"_yql_plan_step","id":4294967040}},"o":"4294967040","t":"AssembleOriginalData"},"w":33,"id":2
2},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":33,"id":12},"28":{"p":{"i":"4294967043","p":{"address":{"name":"_yql_delete_flag","id":4294967043}},"o":"4294967043","t":"AssembleOriginalData"},"w":33,"id":28},"26":{"p":{"i":"4294967042","p":{"address":{"name":"_yql_write_id","id":4294967042}},"o":"4294967042","t":"AssembleOriginalData"},"w":33,"id":26}}}; 2025-09-25T16:20:47.547079Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:11;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1758816887926:max}. CS min read snapshot: {1758816947926:max}. now: 2025-09-25T16:20:47.547073Z; 2025-09-25T16:20:47.549387Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1758816887926:max} readable: {1758817247926:max} at tablet 9437184 2025-09-25T16:20:47.560094Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-09-25T16:20:47.560188Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-09-25T16:20:47.560197Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-09-25T16:20:47.560315Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-09-25T16:20:47.560623Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1758816887926:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:11;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1758816887926:max}. CS min read snapshot: {1758816947926:max}. 
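Both scans in the ReadStale output above and just below fail with "Snapshot too old": the requested snapshot {1758816887926:max} is 60 seconds older than the shard's minimum readable snapshot {1758816947926:max}, so TTxScan refuses to build metadata. A minimal sketch of that comparison, inferred from the error text only (names are illustrative, not the YDB source):

```cpp
#include <cstdint>
#include <iostream>

// Sketch of the staleness check implied by the "Snapshot too old" errors:
// a scan is rejected when its snapshot is older than the shard's minimum
// readable snapshot.
struct TSnapshot {
    uint64_t PlanStep;  // e.g. 1758816887926 in the failing requests
};

static bool CanReadAt(const TSnapshot& requested, const TSnapshot& minReadable) {
    return requested.PlanStep >= minReadable.PlanStep;
}

int main() {
    const TSnapshot requested{1758816887926ULL};    // snapshot the scans asked for
    const TSnapshot minReadable{1758816947926ULL};  // "CS min read snapshot", 60 s newer

    if (!CanReadAt(requested, minReadable)) {
        std::cout << "TTxScan failed: Snapshot too old: {" << requested.PlanStep
                  << ":max}. CS min read snapshot: {" << minReadable.PlanStep
                  << ":max}\n";
    }
    return 0;
}
```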
now: 2025-09-25T16:20:47.560615Z; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-09-25T16:20:45.308427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:45.314321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:45.314375Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:45.315259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:45.315322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:45.315365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:45.315407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:45.315428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:45.315453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:45.315476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:45.315498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:45.315522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:45.315543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.315566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:45.315588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:45.315655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:45.322799Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:45.322869Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:45.322877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:45.322913Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.322949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:45.322960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:45.322965Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:45.322978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:45.322987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:45.322996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:45.323001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:45.323016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.323024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:45.323046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:45.323052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:45.323065Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:45.323072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:45.323079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:45.323083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:45.323093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:45.323102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:45.323108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:45.323118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:45.323126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:45.323131Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:45.323164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:45.323172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:45.323177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:45.323193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:45.323212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.323218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.323227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:45.323235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:45.323240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:45.323248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:45.323257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:45.323263Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:45.323279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:45.323287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:47.600681Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:47.600723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:47.600744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:47.600813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:984:2851] finished for tablet 9437184 2025-09-25T16:20:47.600909Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:983:2850];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":2408560,"name":"_full_task","f":2408560,"d_finished":0,"c":0,"l":2409957,"d":1397},"events":[{"name":"bootstrap","f":2408605,"d_finished":180,"c":1,"l":2408785,"d":180},{"a":2409884,"name":"ack","f":2409713,"d_finished":148,"c":1,"l":2409861,"d":221},{"a":2409882,"name":"processing","f":2408814,"d_finished":383,"c":3,"l":2409861,"d":458},{"name":"ProduceResults","f":2408725,"d_finished":273,"c":6,"l":2409945,"d":273},{"a":2409945,"name":"Finish","f":2409945,"d_finished":0,"c":0,"l":2409957,"d":12},{"name":"task_result","f":2408817,"d_finished":227,"c":2,"l":2409680,"d":227}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:47.600945Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:983:2850];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":2408560,"name":"_full_task","f":2408560,"d_finished":0,"c":0,"l":2410010,"d":1450},"events":[{"name":"bootstrap","f":2408605,"d_finished":180,"c":1,"l":2408785,"d":180},{"a":2409884,"name":"ack","f":2409713,"d_finished":148,"c":1,"l":2409861,"d":274},{"a":2409882,"name":"processing","f":2408814,"d_finished":383,"c":3,"l":2409861,"d":511},{"name":"ProduceResults","f":2408725,"d_finished":273,"c":6,"l":2409945,"d":273},{"a":2409945,"name":"Finish","f":2409945,"d_finished":0,"c":0,"l":2410010,"d":65},{"name":"task_result","f":2408817,"d_finished":227,"c":2,"l":2409680,"d":227}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:47.600955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:47.599374Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-09-25T16:20:47.600959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:47.600983Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> EvWrite::WriteInTransaction >> 
TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TColumnShardTestReadWrite::WriteReadZSTD >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> Normalizers::RemoveWriteIdNormalizer [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock >> TStorageBalanceTest::TestScenario3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] Test command err: 2025-09-25T16:20:44.006640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.012408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.012473Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.013394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RemoveWriteId; 2025-09-25T16:20:44.013446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:44.013490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.013515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.013537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.013562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.013584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.013606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.013630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.013651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.013673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.013691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.013710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.013730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.020559Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.020627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=RemoveWriteId; 2025-09-25T16:20:44.020640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:44.020698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=special_cleaner.cpp:155;normalizer=TDeleteTrash;message=found 0 columns to delete grouped in 0 batches; 2025-09-25T16:20:44.020728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RemoveWriteId;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:44.020743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-09-25T16:20:44.020749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-09-25T16:20:44.020766Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.020789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.020798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.020802Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-09-25T16:20:44.020813Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.020844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.020853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.020858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.020878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.020887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.020894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.020899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.020910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.020919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.020927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.020931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.020941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.020950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.020954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.020965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.020972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.020977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.021002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.021010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.021015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.021030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.021038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.021042Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.021050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.021058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descript ... 
y1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:49.740119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-09-25T16:20:49.740122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:49.740125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:49.740205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:49.740217Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:49.740228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-09-25T16:20:49.740233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-09-25T16:20:49.740246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:423:2424];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-09-25T16:20:49.740255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:49.740294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:425:2425] finished for tablet 9437184 2025-09-25T16:20:49.740360Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:423:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.54}],"full":{"a":3322496,"name":"_full_task","f":3322496,"d_finished":0,"c":0,"l":5862874,"d":2540378},"events":[{"name":"bootstrap","f":3322532,"d_finished":190,"c":1,"l":3322722,"d":190},{"a":5862844,"name":"ack","f":3330175,"d_finished":1382490,"c":421,"l":5862835,"d":1382520},{"a":5862843,"name":"processing","f":3322763,"d_finished":2520418,"c":843,"l":5862835,"d":2520449},{"name":"ProduceResults","f":3322647,"d_finished":1970152,"c":1266,"l":5862862,"d":1970152},{"a":5862862,"name":"Finish","f":5862862,"d_finished":0,"c":0,"l":5862874,"d":12},{"name":"task_result","f":3322767,"d_finished":1136088,"c":422,"l":5862687,"d":1136088}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:423:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:49.740397Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:423:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.54}],"full":{"a":3322496,"name":"_full_task","f":3322496,"d_finished":0,"c":0,"l":5862932,"d":2540436},"events":[{"name":"bootstrap","f":3322532,"d_finished":190,"c":1,"l":3322722,"d":190},{"a":5862844,"name":"ack","f":3330175,"d_finished":1382490,"c":421,"l":5862835,"d":1382578},{"a":5862843,"name":"processing","f":3322763,"d_finished":2520418,"c":843,"l":5862835,"d":2520507},{"name":"ProduceResults","f":3322647,"d_finished":1970152,"c":1266,"l":5862862,"d":1970152},{"a":5862862,"name":"Finish","f":5862862,"d_finished":0,"c":0,"l":5862932,"d":70},{"name":"task_result","f":3322767,"d_finished":1136088,"c":422,"l":5862687,"d":1136088}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:49.740408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:47.199825Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-09-25T16:20:49.740413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:49.740433Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> EvWrite::WriteWithLock [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for 
TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:46.444921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:46.444937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.444941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:46.444945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:46.444950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:46.444953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:46.444959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:46.444970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:46.445060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:46.445103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:46.464857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:46.464883Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:46.464981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:46.469254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:46.469335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:46.469371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:46.470563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:46.470627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear 
TempDirsState with owners number: 0 2025-09-25T16:19:46.470713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.470862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:46.471892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.471932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:46.472123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:46.472133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:46.472147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:46.472152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:46.472156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:46.472180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:46.473265Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:46.488802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:46.488870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.488909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:46.488914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:46.488950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-09-25T16:19:46.488975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:46.489420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.489445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:46.489482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.489488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:46.489492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:46.489495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:46.489771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.489778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:46.489781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:46.490017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.490024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:46.490028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:46.490032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:46.490468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:46.490726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:46.490761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:46.490890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:46.490906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 0.csv.zst HTTP/1.1 HEADERS: Host: localhost:11789 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 781071AE-8AC3-4571-9360-21DC416120DE amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:20:49.474209Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 9fefc518a77e08ff2e1005d0369e6533 ContentLength: 317 } } FAKE_COORDINATOR: Erasing txId 1003 2025-09-25T16:20:49.474309Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:49.474870Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-09-25T16:20:49.474943Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:20:49.474950Z node 198 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:20:49.474962Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 0-127 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-09-25T16:20:49.475042Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-09-25T16:20:49.475048Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-09-25T16:20:49.475109Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:20:49.475114Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:20:49.475120Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: 
NotifyTxCompletion transaction is registered, txId: 1003, at schemeshard: 72057594046678944 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11789 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0608D329-D00D-43D8-A789-F3F47E0E9B69 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-127 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:20:49.475490Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-09-25T16:20:49.475498Z node 198 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-09-25T16:20:49.475507Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 128-255 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11789 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FF64BCD3-8FEE-43D4-9824-A8676E832613 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=128-255 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:20:49.475943Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-09-25T16:20:49.475951Z node 198 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-09-25T16:20:49.475960Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 256-316 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11789 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DD42BC54-841B-4B86-8755-1C8285866740 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=256-316 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-09-25T16:20:49.476442Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 61b } 2025-09-25T16:20:49.476448Z node 198 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 317, body-size# 61 2025-09-25T16:20:49.476619Z node 198 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 100, size# 2900 2025-09-25T16:20:49.477195Z node 198 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ChecksumState: DownloadState: } } 2025-09-25T16:20:49.477206Z node 198 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: 
[Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ChecksumState: DownloadState: } 2025-09-25T16:20:49.477211Z node 198 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 1, error# , writtenBytes# 1092, writtenRows# 100 2025-09-25T16:20:49.479452Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 850403526930 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:20:49.479464Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:20:49.479481Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 336 RawX2: 850403526930 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:20:49.479492Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 336 RawX2: 850403526930 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-09-25T16:20:49.479506Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:20:49.479511Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:49.479517Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:20:49.479525Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:20:49.479558Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:20:49.479963Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:49.480053Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:20:49.480063Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: 
[72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:20:49.480079Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:49.480082Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:49.480086Z node 198 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:20:49.480088Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:49.480091Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:20:49.480105Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [198:412:2384] message: TxId: 1003 2025-09-25T16:20:49.480109Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:20:49.480113Z node 198 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:20:49.480116Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:20:49.480141Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-09-25T16:20:49.480755Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:20:49.480766Z node 198 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [198:454:2425] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-09-25T16:20:49.306797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:49.310526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:49.310578Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:49.311449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:49.311496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:49.311531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:49.311546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:49.311560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:49.311575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:49.311589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:49.311602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:49.311616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:49.311633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.311653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:49.311685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:49.311720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:49.316799Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:49.316866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:49.316874Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:49.316902Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.316930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:49.316943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:49.316949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:49.316961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:49.316970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:49.316978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:49.316983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:49.317000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.317007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:49.317013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:49.317016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:49.317024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:49.317029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:49.317034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:49.317037Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:49.317044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:49.317049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:49.317052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-09-25T16:20:49.317059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:49.317066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:49.317069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:49.317086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:49.317093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:49.317096Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:49.317105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:49.317111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.317114Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.317119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:49.317125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:49.317128Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:49.317133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:49.317139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:49.317143Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:49.317152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-09-25T16:20:49.317158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:51.000998Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=54;finished=1; 2025-09-25T16:20:51.001003Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:51.001009Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:51.001050Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:51.001068Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:54;schema=key: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001073Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:51.001084Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=54; 2025-09-25T16:20:51.001092Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=54;batch_columns=key,field; 2025-09-25T16:20:51.001113Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:204:2216];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-09-25T16:20:51.001124Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001142Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001153Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001170Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:51.001180Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001191Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan 
iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001198Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:205:2217] finished for tablet 9437184 2025-09-25T16:20:51.001279Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:204:2216];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.024}],"full":{"a":1783918,"name":"_full_task","f":1783918,"d_finished":0,"c":0,"l":1808320,"d":24402},"events":[{"name":"bootstrap","f":1783994,"d_finished":220,"c":1,"l":1784214,"d":220},{"a":1808279,"name":"ack","f":1788864,"d_finished":8238,"c":86,"l":1808268,"d":8279},{"a":1808277,"name":"processing","f":1784255,"d_finished":18166,"c":173,"l":1808268,"d":18209},{"name":"ProduceResults","f":1784135,"d_finished":13989,"c":261,"l":1808306,"d":13989},{"a":1808307,"name":"Finish","f":1808307,"d_finished":0,"c":0,"l":1808320,"d":13},{"name":"task_result","f":1784260,"d_finished":9553,"c":87,"l":1808124,"d":9553}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001293Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:204:2216];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:51.001335Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:204:2216];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.024}],"full":{"a":1783918,"name":"_full_task","f":1783918,"d_finished":0,"c":0,"l":1808410,"d":24492},"events":[{"name":"bootstrap","f":1783994,"d_finished":220,"c":1,"l":1784214,"d":220},{"a":1808279,"name":"ack","f":1788864,"d_finished":8238,"c":86,"l":1808268,"d":8369},{"a":1808277,"name":"processing","f":1784255,"d_finished":18166,"c":173,"l":1808268,"d":18299},{"name":"ProduceResults","f":1784135,"d_finished":13989,"c":261,"l":1808306,"d":13989},{"a":1808307,"name":"Finish","f":1808307,"d_finished":0,"c":0,"l":1808410,"d":103},{"name":"task_result","f":1784260,"d_finished":9553,"c":87,"l":1808124,"d":9553}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:51.001351Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:50.976668Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2025-09-25T16:20:51.001357Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:51.001385Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> THeavyPerfTest::TTestLoadEverything [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-09-25T16:20:49.456374Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:49.460187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:49.460226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:49.460802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:49.460882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:49.460925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:49.460951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:49.460971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:49.460991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:49.461011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:49.461031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:49.461051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:49.461073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.461095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:49.461117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:49.461169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:49.466736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:49.466863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:49.466875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:49.466914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.466955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:49.466969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:49.466976Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:49.466988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:49.466998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:49.467007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:49.467012Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:49.467033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.467042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:49.467051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:49.467056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:49.467068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:49.467076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:49.467084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:49.467090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:49.467100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:49.467108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:49.467113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:49.467124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:49.467132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:49.467138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:49.467166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:49.467176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:49.467181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:49.467196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:49.467206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.467211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.467219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:49.467228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:49.467233Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:49.467243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:49.467251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:49.467257Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:49.467273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:49.467283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:52.243083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:52.243121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:52.243133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:52.243190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243198Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:984:2851] finished for tablet 9437184 2025-09-25T16:20:52.243256Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:983:2850];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.05}],"full":{"a":2851482,"name":"_full_task","f":2851482,"d_finished":0,"c":0,"l":2902142,"d":50660},"events":[{"name":"bootstrap","f":2851527,"d_finished":177,"c":1,"l":2851704,"d":177},{"a":2902111,"name":"ack","f":2901959,"d_finished":132,"c":1,"l":2902091,"d":163},{"a":2902109,"name":"processing","f":2851729,"d_finished":49334,"c":3,"l":2902091,"d":49367},{"name":"ProduceResults","f":2851661,"d_finished":221,"c":6,"l":2902132,"d":221},{"a":2902133,"name":"Finish","f":2902133,"d_finished":0,"c":0,"l":2902142,"d":9},{"name":"task_result","f":2851731,"d_finished":49193,"c":2,"l":2901908,"d":49193}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243264Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:52.243294Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:983:2850];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.05}],"full":{"a":2851482,"name":"_full_task","f":2851482,"d_finished":0,"c":0,"l":2902201,"d":50719},"events":[{"name":"bootstrap","f":2851527,"d_finished":177,"c":1,"l":2851704,"d":177},{"a":2902111,"name":"ack","f":2901959,"d_finished":132,"c":1,"l":2902091,"d":222},{"a":2902109,"name":"processing","f":2851729,"d_finished":49334,"c":3,"l":2902091,"d":49426},{"name":"ProduceResults","f":2851661,"d_finished":221,"c":6,"l":2902132,"d":221},{"a":2902133,"name":"Finish","f":2902133,"d_finished":0,"c":0,"l":2902201,"d":68},{"name":"task_result","f":2851731,"d_finished":49193,"c":2,"l":2901908,"d":49193}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.243304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:52.192439Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2025-09-25T16:20:52.243309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:52.243336Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2025-09-25T16:18:10.385122Z node 1 :BS_NODE DEBUG: 
{NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:10.390629Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:10.390751Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:10.391021Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:10.391110Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:10.391345Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:10.391356Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:10.391551Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-09-25T16:18:10.391557Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:10.391583Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:10.391610Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:10.396463Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:10.396481Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:10.396950Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.396992Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397022Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397053Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397086Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397120Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397147Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:10.397153Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 
SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:10.397167Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-09-25T16:18:10.397173Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-09-25T16:18:10.397181Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:10.397190Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:10.397341Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:10.400932Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:10.400967Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:10.401826Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:10.401893Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:10.401962Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:10.402010Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:10.402019Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:10.404095Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:10.404166Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:10.405224Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:10.405263Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:10.405278Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:10.405284Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:10.405331Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:53:2093] 2025-09-25T16:18:10.405353Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:10.405568Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:10.405583Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:10.405594Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-09-25T16:18:10.405647Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap 
[1:53:2093] 2025-09-25T16:18:10.405653Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:53:2093] 2025-09-25T16:18:10.405664Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:18:10.405674Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-09-25T16:18:10.405680Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-09-25T16:18:10.405689Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:10.405725Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-09-25T16:18:10.405730Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-09-25T16:18:10.405785Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:10.408002Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-09-25T16:18:10.408019Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-09-25T16:18:10.408052Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:10.408110Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:53:2093] 2025-09-25T16:18:10.408124Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:10.408140Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-09-25T16:18:10.408148Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:10.408439Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:10.408494Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:10.408627Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-09-25T16:18:10.408639Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 2146435075 Sender# [1:51:2092] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:10.408648Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.006606s 
2025-09-25T16:18:10.408751Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:10.408847Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:538} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-09-25T16:18:10.408875Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-09-25T16:18:10.408921Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-09-25T16:18:10.408928Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2025-09-25T16:18:10.408938Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 26863924 ... :65: [34a6dc9fca6230bd] restore Id# [72057594037927937:2:492:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:20:49.087893Z node 14 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [34a6dc9fca6230bd] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:492:0:0:246:1] Marker# BPG33 2025-09-25T16:20:49.087898Z node 14 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [34a6dc9fca6230bd] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:492:0:0:246:1] Marker# BPG32 2025-09-25T16:20:49.087932Z node 14 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [14:444:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:492:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:20:49.088688Z node 14 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [34a6dc9fca6230bd] received {EvVPutResult Status# OK ID# [72057594037927937:2:492:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 509 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 510 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:20:49.088713Z node 14 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [34a6dc9fca6230bd] Result# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:20:49.088719Z node 14 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [34a6dc9fca6230bd] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:20:49.088739Z node 14 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.174 sample PartId# [72057594037927937:2:492:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 14 } TEvVPutResult{ TimestampMs# 0.937 VDiskId# [0:1:0:0:0] NodeId# 14 Status# OK } ] 
} 2025-09-25T16:20:49.088866Z node 14 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:20:49.088904Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} commited cookie 1 for step 492 2025-09-25T16:20:49.088915Z node 14 :HIVE DEBUG: tx__reassign_groups.cpp:65: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037906)::Complete 2025-09-25T16:20:49.088931Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:332: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{21501664261152}(72075186224037906)::Complete SideEffects: {Notifications: 0x7FF0000F [14:5580:2224] NKikimr::NHive::TEvPrivate::TEvRestartCancelled} 2025-09-25T16:20:49.088981Z node 14 :HIVE DEBUG: storage_balancer.cpp:115: HIVE#72057594037927937 StorageBalancer received RestartCancelled for tablet (72075186224037906,0) 2025-09-25T16:20:49.088991Z node 14 :HIVE DEBUG: storage_balancer.cpp:92: HIVE#72057594037927937 StorageBalancer initiating reassign for tablet 72075186224037972 2025-09-25T16:20:49.089172Z node 14 :HIVE DEBUG: hive_impl.cpp:1004: HIVE#72057594037927937 THive::TEvReassignTablet TabletID: 72075186224037972 Channels: 2 Channels: 1 Channels: 0 ReassignReason: HIVE_REASSIGN_REASON_BALANCE 2025-09-25T16:20:49.089185Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-09-25T16:20:49.089191Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:20:49.089197Z node 14 :HIVE DEBUG: tx__reassign_groups.cpp:35: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037972,[0,1,2])::Execute 2025-09-25T16:20:49.089280Z node 14 :HIVE DEBUG: hive_impl.cpp:1105: HIVE#72057594037927937 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037972 GroupParameters { StoragePoolSpecifier { Name: "def1" } } ReturnAllMatchingGroups: true 2025-09-25T16:20:49.089298Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{996, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-09-25T16:20:49.089307Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:20:49.089338Z node 14 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [14:1375:2260] 2025-09-25T16:20:49.089343Z node 14 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [14:1375:2260] 2025-09-25T16:20:49.089352Z node 14 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [14:1311:2224] EventType# 268637702 c[def1] ****------------------------------------------------------------------------------------------------ (0.044) ******---------------------------------------------------------------------------------------------- (0.064) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.046) *****----------------------------------------------------------------------------------------------- (0.046) 
*******--------------------------------------------------------------------------------------------- (0.07) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.056) 2025-09-25T16:20:49.190152Z node 14 :HIVE DEBUG: hive_impl.cpp:458: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK MatchingGroups { Groups { GroupID: 2147483649 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2200000000 Occupancy: 0.044 } AllocatedSize: 2200000000 } Groups { GroupID: 2147483650 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3200000000 Occupancy: 0.064 } AllocatedSize: 3200000000 } Groups { GroupID: 2147483651 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } Groups { GroupID: 2147483652 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483653 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483654 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3500000000 Occupancy: 0.07 } AllocatedSize: 3500000000 } Groups { GroupID: 2147483655 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } Groups { GroupID: 2147483656 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2700000000 Occupancy: 0.054 } AllocatedSize: 2700000000 } Groups { GroupID: 2147483657 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3100000000 Occupancy: 0.062 } AllocatedSize: 3100000000 } Groups { GroupID: 2147483658 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483659 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } } 2025-09-25T16:20:49.190223Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-09-25T16:20:49.190235Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:20:49.190251Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}(72075186224037972,HIVE_REASSIGN_REASON_BALANCE,[]) 2025-09-25T16:20:49.190278Z node 14 :HIVE 
DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 channel 0 assigned to group 2147483649 2025-09-25T16:20:49.190283Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped reassign of channel 0 2025-09-25T16:20:49.190291Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 channel 1 assigned to group 2147483651 2025-09-25T16:20:49.190296Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped reassign of channel 1 2025-09-25T16:20:49.190303Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 channel 2 assigned to group 2147483658 2025-09-25T16:20:49.190307Z node 14 :HIVE DEBUG: tx__update_tablet_groups.cpp:171: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped reassign of channel 2 2025-09-25T16:20:49.190316Z node 14 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 wasn't changed 2025-09-25T16:20:49.190321Z node 14 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped channel 0 2025-09-25T16:20:49.190348Z node 14 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped channel 1 2025-09-25T16:20:49.190355Z node 14 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{21501664261152}: tablet 72075186224037972 skipped channel 2 2025-09-25T16:20:49.190377Z node 14 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{21501664261152}(72075186224037972)::Execute - TryToBoot was not successfull 2025-09-25T16:20:49.190389Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{997, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-09-25T16:20:49.190395Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> TColumnShardTestReadWrite::WriteRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-09-25T16:20:37.647846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:37.651266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:37.651319Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:37.652172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:37.652243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:37.652288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:37.652315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:37.652334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:37.652355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:37.652377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:37.652395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:37.652413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:37.652433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.652454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:37.652474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:37.652521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:37.658001Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:37.658064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:37.658076Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:37.658115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:37.658148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:37.658158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:37.658163Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:37.658171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:37.658178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:37.658184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:37.658187Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:37.658203Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:37.658209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:37.658214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:37.658217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:37.658226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:37.658231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:37.658237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:37.658240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:37.658247Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:37.658253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:37.658256Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:37.658264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:37.658270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:37.658273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:37.658293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:37.658299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:37.658302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:37.658313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:37.658319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.658322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.658328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:37.658334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:37.658337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:37.658343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:37.658349Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:37.658353Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:37.658364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:37.658370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :18;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:68;blob_range:[
NO_BLOB:0:9312];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:8592];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:8280];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:8288];;;;switched=(portion_id:215;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:6373584;index_size:0;meta:(()););(portion_id:213;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:6356384;index_size:0;meta:(()););; 2025-09-25T16:20:52.729615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4465:6457];task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:20:52.729626Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4465:6457];task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:52.730153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:20:52.730842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:20:52.730854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:52.857841Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:20:52.857880Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6356384;count=689;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:20:52.973017Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:52.973087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=9999,9999,9999,9999,; 2025-09-25T16:20:52.973101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7433340;count=1;packed=6373584; 2025-09-25T16:20:52.973119Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=62;sum=93010;count=1701; 2025-09-25T16:20:52.973125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=174;sum=174706;count=1702;size_of_meta=112; 2025-09-25T16:20:52.973136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=246;sum=235978;count=851;size_of_portion=184; 2025-09-25T16:20:52.973216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:52.973289Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:20:52.974417Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:52.974684Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 543 2025-09-25T16:20:52.975305Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=25498800;raw_bytes=29753360;count=4;records=300200} inactive {blob_bytes=71325608;raw_bytes=70983540;count=211;records=825200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:53.036959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:53.036982Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:20:53.036994Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=with_appended.cpp:65;portions=216,;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:53.037093Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::9b0d14c2-9a2b11f0-97531d98-91b7fafd; 2025-09-25T16:20:53.037114Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:19125216;portions_count:216;); 2025-09-25T16:20:53.037121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:53.037143Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:53.037153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=6; 2025-09-25T16:20:53.037170Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815440569;tx_id=18446744073709551615;;current_snapshot_ts=1758817239260; 2025-09-25T16:20:53.037179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:53.037191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:53.037198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:53.037229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.910000s; 2025-09-25T16:20:53.037240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b0d14c2-9a2b11f0-97531d98-91b7fafd;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:53.037306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 543 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] Test command err: 2025-09-25T16:20:47.730492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:47.735688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:47.735741Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:47.736528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RemoveDeleteFlag; 2025-09-25T16:20:47.736570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:47.736605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:47.736630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:47.736664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:47.736714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:47.736736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:47.736756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:47.736777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:47.736796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:47.736815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.736851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:47.736892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:47.736912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:47.743157Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:47.743224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=RemoveDeleteFlag; 2025-09-25T16:20:47.743239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:47.743298Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=special_cleaner.cpp:155;normalizer=TDeleteTrash;message=found 0 columns to delete grouped in 0 batches; 2025-09-25T16:20:47.743329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RemoveDeleteFlag;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:47.743344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-09-25T16:20:47.743351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-09-25T16:20:47.743368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.743388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:47.743397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:47.743402Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-09-25T16:20:47.743413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:47.743423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:47.743431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:47.743436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:47.743455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.743465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:47.743473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:47.743484Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:47.743496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:47.743506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:47.743514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:47.743518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:47.743528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:47.743536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:47.743541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:47.743550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:47.743558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:47.743563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:47.743588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:47.743597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:47.743602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:47.743616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:47.743624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.743629Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 
2025-09-25T16:20:47.743638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:47.743661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched ... key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:52.853549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-09-25T16:20:52.853555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:52.853562Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:52.853675Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:52.853704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:52.853728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-09-25T16:20:52.853738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-09-25T16:20:52.853774Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:423:2424];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-09-25T16:20:52.853794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:52.853878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.853897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:425:2425] finished for tablet 9437184 2025-09-25T16:20:52.853986Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:423:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.439}],"full":{"a":2800280,"name":"_full_task","f":2800280,"d_finished":0,"c":0,"l":5240159,"d":2439879},"events":[{"name":"bootstrap","f":2800326,"d_finished":216,"c":1,"l":2800542,"d":216},{"a":5240108,"name":"ack","f":2810231,"d_finished":1164033,"c":421,"l":5240088,"d":1164084},{"a":5240106,"name":"processing","f":2800594,"d_finished":2417466,"c":843,"l":5240089,"d":2417519},{"name":"ProduceResults","f":2800466,"d_finished":1779099,"c":1266,"l":5240141,"d":1779099},{"a":5240142,"name":"Finish","f":5240142,"d_finished":0,"c":0,"l":5240159,"d":17},{"name":"task_result","f":2800600,"d_finished":1251421,"c":422,"l":5239813,"d":1251421}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.854001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:423:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:52.854057Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:423:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.439}],"full":{"a":2800280,"name":"_full_task","f":2800280,"d_finished":0,"c":0,"l":5240256,"d":2439976},"events":[{"name":"bootstrap","f":2800326,"d_finished":216,"c":1,"l":2800542,"d":216},{"a":5240108,"name":"ack","f":2810231,"d_finished":1164033,"c":421,"l":5240088,"d":1164181},{"a":5240106,"name":"processing","f":2800594,"d_finished":2417466,"c":843,"l":5240089,"d":2417616},{"name":"ProduceResults","f":2800466,"d_finished":1779099,"c":1266,"l":5240141,"d":1779099},{"a":5240142,"name":"Finish","f":5240142,"d_finished":0,"c":0,"l":5240256,"d":114},{"name":"task_result","f":2800600,"d_finished":1251421,"c":422,"l":5239813,"d":1251421}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:52.854088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:50.413899Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-09-25T16:20:52.854096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:52.854124Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:425:2425];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-09-25T16:20:52.814252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:52.820388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:52.820450Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:52.821353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:52.821406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:52.821452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:52.821479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:52.821502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:52.821526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:52.821549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:52.821571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:52.821595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:52.821616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.821640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:52.821662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:52.821716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:52.828993Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:52.829063Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:52.829074Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:52.829113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.829158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:52.829172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:52.829179Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:52.829192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:52.829204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:52.829213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:52.829219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:52.829242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.829252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:52.829261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:52.829265Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:52.829277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:52.829286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:52.829296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-09-25T16:20:52.829301Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:52.829312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:52.829321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:52.829327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:52.829337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:52.829348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:52.829354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:52.829386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:52.829395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:52.829401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:52.829418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:52.829428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.829434Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.829443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:52.829452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:52.829458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-09-25T16:20:52.829468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:52.829477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:52.829484Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:52.829501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:52.829510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:54.152371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:54.152415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-09-25T16:20:54.152433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152466Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:54.152514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152535Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:414:2425] finished for tablet 9437184 2025-09-25T16:20:54.152600Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:413:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1448061,"name":"_full_task","f":1448061,"d_finished":0,"c":0,"l":1449919,"d":1858},"events":[{"name":"bootstrap","f":1448111,"d_finished":201,"c":1,"l":1448312,"d":201},{"a":1449871,"name":"ack","f":1449674,"d_finished":171,"c":1,"l":1449845,"d":219},{"a":1449869,"name":"processing","f":1448343,"d_finished":491,"c":3,"l":1449846,"d":541},{"name":"ProduceResults","f":1448248,"d_finished":281,"c":6,"l":1449907,"d":281},{"a":1449908,"name":"Finish","f":1449908,"d_finished":0,"c":0,"l":1449919,"d":11},{"name":"task_result","f":1448346,"d_finished":310,"c":2,"l":1449633,"d":310}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:54.152658Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:413:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1448061,"name":"_full_task","f":1448061,"d_finished":0,"c":0,"l":1449993,"d":1932},"events":[{"name":"bootstrap","f":1448111,"d_finished":201,"c":1,"l":1448312,"d":201},{"a":1449871,"name":"ack","f":1449674,"d_finished":171,"c":1,"l":1449845,"d":293},{"a":1449869,"name":"processing","f":1448343,"d_finished":491,"c":3,"l":1449846,"d":615},{"name":"ProduceResults","f":1448248,"d_finished":281,"c":6,"l":1449907,"d":281},{"a":1449908,"name":"Finish","f":1449908,"d_finished":0,"c":0,"l":1449993,"d":85},{"name":"task_result","f":1448346,"d_finished":310,"c":2,"l":1449633,"d":310}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.152673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:54.150554Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-09-25T16:20:54.152678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:54.152708Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-09-25T16:20:52.317576Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:52.320963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:52.321004Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:52.321611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:52.321663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:52.321690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:52.321704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:52.321718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:52.321742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:52.321763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:52.321776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:52.321789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:52.321801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.321815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:52.321828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 
2025-09-25T16:20:52.321858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:52.326617Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:52.326679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:52.326689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:52.326717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.326748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:52.326758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:52.326763Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:52.326775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:52.326784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:52.326794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:52.326799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:52.326820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.326829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:52.326836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:52.326842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:52.326854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:52.326861Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:52.326870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:52.326875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:52.326881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:52.326887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:52.326890Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:52.326897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:52.326902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:52.326906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:52.326924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:52.326929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:52.326932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:52.326941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:52.326947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.326950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.326955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:52.326961Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:52.326964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:52.326970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:52.326975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:52.326979Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:52.326989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:52.326995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... age=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:54.547096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:54.547120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:54.547131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547170Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:54.547179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:984:2851] finished for tablet 9437184 2025-09-25T16:20:54.547231Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:983:2850];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result","l_task_result"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":2332396,"name":"_full_task","f":2332396,"d_finished":0,"c":0,"l":2333518,"d":1122},"events":[{"name":"bootstrap","f":2332430,"d_finished":144,"c":1,"l":2332574,"d":144},{"a":2333490,"name":"ack","f":2333375,"d_finished":100,"c":1,"l":2333475,"d":128},{"a":2333489,"name":"processing","f":2332600,"d_finished":315,"c":3,"l":2333475,"d":344},{"name":"ProduceResults","f":2332517,"d_finished":191,"c":6,"l":2333511,"d":191},{"a":2333511,"name":"Finish","f":2333511,"d_finished":0,"c":0,"l":2333518,"d":7},{"name":"task_result","f":2332603,"d_finished":207,"c":2,"l":2333347,"d":207}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:983:2850];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:54.547264Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:983:2850];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result","l_task_result"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":2332396,"name":"_full_task","f":2332396,"d_finished":0,"c":0,"l":2333562,"d":1166},"events":[{"name":"bootstrap","f":2332430,"d_finished":144,"c":1,"l":2332574,"d":144},{"a":2333490,"name":"ack","f":2333375,"d_finished":100,"c":1,"l":2333475,"d":172},{"a":2333489,"name":"processing","f":2332600,"d_finished":315,"c":3,"l":2333475,"d":388},{"name":"ProduceResults","f":2332517,"d_finished":191,"c":6,"l":2333511,"d":191},{"a":2333511,"name":"Finish","f":2333511,"d_finished":0,"c":0,"l":2333562,"d":51},{"name":"task_result","f":2332603,"d_finished":207,"c":2,"l":2333347,"d":207}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:54.547273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:54.546004Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-09-25T16:20:54.547277Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:54.547294Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:984:2851];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 19440, msgbus: 16734 2025-09-25T16:20:02.089561Z 
node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062362710194888:2082];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:20:02.090352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/0025e7/r3tmp/tmpNeKGx1/pdisk_1.dat 2025-09-25T16:20:02.127687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-09-25T16:20:02.137798Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19440, node 1 2025-09-25T16:20:02.149506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-09-25T16:20:02.149522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-09-25T16:20:02.149524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-09-25T16:20:02.149581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16734 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-09-25T16:20:02.177349Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7554062362710195092:2143] Handle TEvNavigate describe path dc-1 2025-09-25T16:20:02.177371Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7554062362710195547:2434] HANDLE EvNavigateScheme dc-1 2025-09-25T16:20:02.177510Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7554062362710195547:2434] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:02.187300Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7554062362710195547:2434] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-09-25T16:20:02.188992Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7554062362710195547:2434] Handle TEvDescribeSchemeResult Forward to# [1:7554062362710195544:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 
} Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: 2025-09-25T16:20:02.192505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:20:02.192540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-09-25T16:20:02.194348Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:7554062362710195092:2143] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:20:02.197037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062362710195092:2143] Handle TEvProposeTransaction 2025-09-25T16:20:02.197053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:326: actor# [1:7554062362710195092:2143] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-09-25T16:20:02.197478Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:20:02.197485Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062362710195092:2143] TxId# 281474976715657 ProcessProposeTransaction 2025-09-25T16:20:02.197513Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062362710195092:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7554062362710195562:2447] 2025-09-25T16:20:02.199600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:20:02.211705Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062362710195562:2447] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:02.211764Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062362710195562:2447] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-09-25T16:20:02.211770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062362710195562:2447] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:02.211795Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062362710195562:2447] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:02.211954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062362710195562:2447] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:02.211988Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062362710195562:2447] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-09-25T16:20:02.212002Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:7554062362710195562:2447] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-09-25T16:20:02.212061Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:7554062362710195562:2447] txid# 281474976715657 HANDLE EvClientConnected 2025-09-25T16:20:02.212341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:20:02.213517Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:7554062362710195562:2447] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-09-25T16:20:02.213543Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:7554062362710195562:2447] txid# 281474976715657 SEND to# [1:7554062362710195557:2442] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-09-25T16:20:02.222024Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7554062362710195092:2143] Handle TEvProposeTransaction 2025-09-25T16:20:02.222037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7554062362710195092:2143] TxId# 281474976715658 ProcessProposeTransaction 2025-09-25T16:20:02.222053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7554062362710195092:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7554062362710195604:2485] 2025-09-25T16:20:02.222861Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:7554062362710195604:2485] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-09-25T16:20:02.222886Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:7554062362710195604:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-09-25T16:20:02.222890Z node 1 :TX_PROXY DEBUG: schemereq.cpp:622: Actor# [1:7554062362710195604:2485] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-09-25T16:20:02.222911Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:7554062362710195604:2485] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-09-25T16:20:02.223068Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:7554062362710195604:2485] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:02.223131Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:7554062362710195604:2485] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 7205759 ... 
h: /dc-1/tenant-db, operationId: 281474976715665:0, at schemeshard: 72075186224037891 2025-09-25T16:20:54.157206Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5601: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-09-25T16:20:54.157209Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5617: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-09-25T16:20:54.157267Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-09-25T16:20:54.157276Z node 60 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72075186224037891, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-09-25T16:20:54.157300Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976715665:0 progress is 1/1 2025-09-25T16:20:54.157305Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-09-25T16:20:54.157310Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976715665:0 progress is 1/1 2025-09-25T16:20:54.157312Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-09-25T16:20:54.157324Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-09-25T16:20:54.157339Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-09-25T16:20:54.157345Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-09-25T16:20:54.157348Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-09-25T16:20:54.157353Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715665:0 2025-09-25T16:20:54.157357Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-09-25T16:20:54.157361Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976715665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-09-25T16:20:54.158326Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-09-25T16:20:54.158405Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-09-25T16:20:54.158467Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-09-25T16:20:54.158476Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-09-25T16:20:54.158466Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [59:7554062586094122584:2838] txid# 281474976715665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715665} 2025-09-25T16:20:54.158485Z node 59 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [59:7554062586094122584:2838] txid# 281474976715665 SEND to# [59:7554062586094122583:2340] Source {TEvProposeTransactionStatus txid# 281474976715665 Status# 48} 2025-09-25T16:20:54.158542Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-09-25T16:20:54.158569Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-09-25T16:20:54.158593Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [60:7554062577143366564:2293], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-09-25T16:20:54.158612Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [60:7554062577143366564:2293], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 TEST clusteradmin triggers auth on tenant 2025-09-25T16:20:54.159028Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-09-25T16:20:54.159077Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-09-25T16:20:54.159080Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2025-09-25T16:20:54.159085Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-09-25T16:20:54.159090Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-09-25T16:20:54.159121Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, 
subscribers: 0 2025-09-25T16:20:54.159759Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 TClient is connected to server localhost:27938 TClient::Ls request: /dc-1/tenant-db 2025-09-25T16:20:54.174637Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [59:7554062577504186876:2144] Handle TEvNavigate describe path /dc-1/tenant-db 2025-09-25T16:20:54.174666Z node 59 :TX_PROXY DEBUG: describe.cpp:270: Actor# [59:7554062586094122590:2843] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-09-25T16:20:54.174838Z node 59 :TX_PROXY DEBUG: describe.cpp:354: Actor# [59:7554062586094122590:2843] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-09-25T16:20:54.174888Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7554062586094122590:2843] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-09-25T16:20:54.175461Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7554062586094122590:2843] Handle TEvDescribeSchemeResult Forward to# [59:7554062586094122589:2842] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 
72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-09-25T16:20:54.186112Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-09-25T16:20:54.186256Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-09-25T16:20:54.186687Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> TColumnShardTestReadWrite::WriteOverload-InStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-09-25T16:20:53.881969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:53.885374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:53.885423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:53.886288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:53.886338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:53.886371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:53.886389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:53.886402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:53.886416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:53.886429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:53.886442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:53.886455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:53.886468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:53.886483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:53.886497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:53.886529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:53.892766Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:53.892867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:53.892881Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:53.892940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:53.892985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:53.892999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:53.893006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:53.893019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:53.893029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:53.893035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:53.893038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:53.893053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:53.893060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:53.893066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:53.893069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:53.893078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:53.893083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:53.893089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:53.893093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:53.893101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:53.893109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:53.893114Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:53.893125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:53.893134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:53.893139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:53.893164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:53.893171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:53.893174Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:53.893184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:53.893191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:53.893194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:53.893200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:53.893206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:53.893210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:53.893216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:53.893222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:53.893226Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:53.893237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:53.893243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
age=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-09-25T16:20:55.185486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-09-25T16:20:55.185519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-09-25T16:20:55.185530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:55.185585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:414:2425] finished for tablet 9437184 2025-09-25T16:20:55.185639Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:413:2424];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1403083,"name":"_full_task","f":1403083,"d_finished":0,"c":0,"l":1404515,"d":1432},"events":[{"name":"bootstrap","f":1403114,"d_finished":131,"c":1,"l":1403245,"d":131},{"a":1404485,"name":"ack","f":1404330,"d_finished":136,"c":1,"l":1404466,"d":166},{"a":1404484,"name":"processing","f":1403264,"d_finished":382,"c":3,"l":1404466,"d":413},{"name":"ProduceResults","f":1403206,"d_finished":217,"c":6,"l":1404507,"d":217},{"a":1404508,"name":"Finish","f":1404508,"d_finished":0,"c":0,"l":1404515,"d":7},{"name":"task_result","f":1403266,"d_finished":236,"c":2,"l":1404284,"d":236}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:413:2424];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:55.185672Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:413:2424];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1403083,"name":"_full_task","f":1403083,"d_finished":0,"c":0,"l":1404560,"d":1477},"events":[{"name":"bootstrap","f":1403114,"d_finished":131,"c":1,"l":1403245,"d":131},{"a":1404485,"name":"ack","f":1404330,"d_finished":136,"c":1,"l":1404466,"d":211},{"a":1404484,"name":"processing","f":1403264,"d_finished":382,"c":3,"l":1404466,"d":458},{"name":"ProduceResults","f":1403206,"d_finished":217,"c":6,"l":1404507,"d":217},{"a":1404508,"name":"Finish","f":1404508,"d_finished":0,"c":0,"l":1404560,"d":52},{"name":"task_result","f":1403266,"d_finished":236,"c":2,"l":1404284,"d":236}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:55.185680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:55.184087Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-09-25T16:20:55.185683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:55.185703Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:414:2425];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader >> EvWrite::WriteWithSplit [GOOD] >> Normalizers::ChunksV0MetaNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:40.228115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.228137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.228142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.228145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.228150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.228153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.228159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.228169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.228253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.228311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.248507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:40.248536Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.248633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 
2025-09-25T16:19:40.252464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.252516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.252540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.253813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.253875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.253938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.254136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.255175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.255230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.255414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.255420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.255434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.255442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.255449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.255479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:40.256623Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.275473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.275530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-09-25T16:19:40.275578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.275586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.275640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.275656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.276318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.276409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.276424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.276429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.276940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.277381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.277393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.277399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.277405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.278082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.278524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.278577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.278785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.278814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... xImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:55.458273Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1113:2904] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:55.458305Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1078:2904] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-09-25T16:20:55.458347Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1113:2904] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817255445628 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817255445628 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817255445628 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 
0 }] } 2025-09-25T16:20:55.459038Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1113:2904] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:20:55.459057Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1078:2904] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-09-25T16:20:55.602727Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:55.602828Z node 54 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 119us result status StatusSuccess 2025-09-25T16:20:55.603080Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 
is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:38.133534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:38.133556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.133560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:38.133564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:38.133569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:38.133572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:38.133579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:38.133591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:38.133684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:38.133735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:38.150190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:38.150225Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:38.150308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:38.154209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:38.154299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:38.154335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:38.155630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:38.155689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2025-09-25T16:19:38.155775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.155981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:38.157236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.157300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:38.157590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:38.157602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:38.157624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:38.157633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:38.157640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:38.157689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:38.159624Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:38.179917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:38.179996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.180056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:38.180064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:38.180120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:38.180135Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:38.180934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.180984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:38.181030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.181038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:38.181042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:38.181046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:38.181487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.181500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:38.181506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:38.181924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.181934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:38.181939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:38.181945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:38.182425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:38.182815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:38.182863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:38.183039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:38.183059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... ion { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:20:55.057926Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1172:2951] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:20:55.057972Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1141:2951] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:20:55.058020Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1172:2951] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817255040899 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817255040899 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1758817255040899 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:20:55.059025Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1172:2951] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-09-25T16:20:55.059052Z node 54 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1141:2951] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 
72075186233409546 } 2025-09-25T16:20:55.294796Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:20:55.294911Z node 54 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 139us result status StatusSuccess 2025-09-25T16:20:55.295146Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] >> Normalizers::PortionsNormalizer [GOOD] >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader >> Normalizers::CleanEmptyPortionsNormalizer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2025-09-25T16:20:55.731362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:55.734891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:55.734933Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:55.735507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:55.735553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-09-25T16:20:55.735577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:55.735592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:55.735604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:55.735619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:55.735633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:55.735646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:55.735658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:55.735671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:55.735685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:55.735708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:55.735738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:55.740565Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:55.740606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:55.740615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:55.740651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:55.740680Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:55.740691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:55.740695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:55.740702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:55.740709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:55.740715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:55.740718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:55.740731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:55.740737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:55.740743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:55.740746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:55.740754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:55.740772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:55.740778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:55.740781Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:55.740788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:55.740793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:55.740797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:55.740804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:55.740810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:55.740813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:55.740855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:55.740878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:55.740884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:55.740894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:55.740901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:55.740904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:55.740909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:55.740915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:55.740918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:55.740924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:55.740929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:55.740933Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:55.740942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:55.740957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 37184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=190;count=2;size_of_meta=112; 2025-09-25T16:20:57.166547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=262;count=1;size_of_portion=184; 2025-09-25T16:20:57.166716Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-09-25T16:20:57.166764Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=2;operation_id=1; 2025-09-25T16:20:57.177851Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-09-25T16:20:57.178076Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=9d95e71e-9a2b11f0-af1b3723-433ce622; 2025-09-25T16:20:57.178145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2025-09-25T16:20:57.178165Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2025-09-25T16:20:57.178177Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=524;count=2;size_of_portion=184; 2025-09-25T16:20:57.178328Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-09-25T16:20:57.178364Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=3;operation_id=2; 2025-09-25T16:20:57.189248Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-09-25T16:20:57.189470Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=9daab946-9a2b11f0-9540e0a3-cbd6b33c; 2025-09-25T16:20:57.189533Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2025-09-25T16:20:57.189547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2025-09-25T16:20:57.189558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=786;count=3;size_of_portion=184; 2025-09-25T16:20:57.189690Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-09-25T16:20:57.189721Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=4;operation_id=3; 2025-09-25T16:20:57.200591Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-09-25T16:20:57.200852Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=9dc024c0-9a2b11f0-b19304c7-8f0d9766; 2025-09-25T16:20:57.200918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2025-09-25T16:20:57.200933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2025-09-25T16:20:57.200947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=1048;count=4;size_of_portion=184; 2025-09-25T16:20:57.201094Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-09-25T16:20:57.201125Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=5;operation_id=4; 2025-09-25T16:20:57.211848Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-09-25T16:20:57.212707Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=5;last=5; 2025-09-25T16:20:57.212727Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:18;writing_size=6330728;operation_id=9dea2edc-9a2b11f0-849b2d09-398fe7a7;in_flight=1;size_in_flight=6330728; 2025-09-25T16:20:57.315873Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:130:2160];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-09-25T16:20:57.338491Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:85;writing_size=6330728;event=data_write_finished;writing_id=9dea2edc-9a2b11f0-849b2d09-398fe7a7; 2025-09-25T16:20:57.338591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2025-09-25T16:20:57.338610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2025-09-25T16:20:57.338621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=1310;count=5;size_of_portion=184; 2025-09-25T16:20:57.338773Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-09-25T16:20:57.338801Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:175;event=add_by_insert_id;id=6;operation_id=5; 2025-09-25T16:20:57.349684Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TColumnShardTestReadWrite::Write [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-09-25T16:20:56.308527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:56.314388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:56.314442Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:56.315265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:56.315318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:56.315356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:56.315382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:56.315404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:56.315427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:56.315449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:56.315471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:56.315492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:56.315514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.315536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:56.315588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:56.315609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:56.322830Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:56.322898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:56.322908Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:56.322952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.322990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:56.323004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:56.323011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:56.323023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:56.323033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:56.323042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:56.323047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:56.323069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.323079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:56.323087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:56.323092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:56.323105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:56.323113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:56.323122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:56.323127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:56.323137Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:56.323146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:56.323151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:56.323162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:56.323170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:56.323175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:56.323205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:56.323214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:56.323219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:56.323235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:56.323244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.323249Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.323258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:56.323266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:56.323272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:56.323281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:56.323290Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:56.323296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:56.323312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descriptio ... mn_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:20:57.915861Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-09-25T16:20:57.915869Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:20:57.915875Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:20:57.915962Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:57.915983Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.915989Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:20:57.916003Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-09-25T16:20:57.916012Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-09-25T16:20:57.916051Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-09-25T16:20:57.916065Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916081Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916096Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916127Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:20:57.916140Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916153Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916158Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:464:2476] finished for tablet 9437184 2025-09-25T16:20:57.916217Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1714596,"name":"_full_task","f":1714596,"d_finished":0,"c":0,"l":1716040,"d":1444},"events":[{"name":"bootstrap","f":1714645,"d_finished":172,"c":1,"l":1714817,"d":172},{"a":1715999,"name":"ack","f":1715833,"d_finished":141,"c":1,"l":1715974,"d":182},{"a":1715998,"name":"processing","f":1714844,"d_finished":361,"c":3,"l":1715975,"d":403},{"name":"ProduceResults","f":1714754,"d_finished":252,"c":6,"l":1716030,"d":252},{"a":1716031,"name":"Finish","f":1716031,"d_finished":0,"c":0,"l":1716040,"d":9},{"name":"task_result","f":1714847,"d_finished":211,"c":2,"l":1715753,"d":211}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916228Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:20:57.916272Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1714596,"name":"_full_task","f":1714596,"d_finished":0,"c":0,"l":1716109,"d":1513},"events":[{"name":"bootstrap","f":1714645,"d_finished":172,"c":1,"l":1714817,"d":172},{"a":1715999,"name":"ack","f":1715833,"d_finished":141,"c":1,"l":1715974,"d":251},{"a":1715998,"name":"processing","f":1714844,"d_finished":361,"c":3,"l":1715975,"d":472},{"name":"ProduceResults","f":1714754,"d_finished":252,"c":6,"l":1716030,"d":252},{"a":1716031,"name":"Finish","f":1716031,"d_finished":0,"c":0,"l":1716109,"d":78},{"name":"task_result","f":1714847,"d_finished":211,"c":2,"l":1715753,"d":211}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:20:57.916285Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:57.914590Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-09-25T16:20:57.916291Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:20:57.916311Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-09-25T16:20:56.753326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:56.758333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:56.758383Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:56.759193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:56.759249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:56.759289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:56.759310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:56.759328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:56.759351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:56.759371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:56.759389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:56.759409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:56.759427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.759446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:56.759465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:56.759507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:56.766151Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:56.766221Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:56.766231Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:56.766278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.766315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:56.766328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:56.766335Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:56.766347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:56.766357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:56.766366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:56.766371Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:56.766392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.766402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:56.766410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:56.766415Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:56.766428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:56.766436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:56.766444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:56.766449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:56.766459Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:56.766467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:56.766472Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:56.766483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:56.766491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:56.766496Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:56.766526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:56.766535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:56.766540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:56.766557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:56.766566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.766571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.766579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:56.766587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:56.766592Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:56.766601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:56.766610Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:56.766616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:56.766633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:56.766642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... [{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"
txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61
,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":
{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-09-25T16:20:43.143504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.149183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.149249Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.150165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.150232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.150279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.150305Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.150347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.150374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.150395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.150416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.150437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.150458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.150480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.150501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.150555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.158091Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.158175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.158187Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.158233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.158275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.158290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.158297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.158310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.158321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.158330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.158336Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.158360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.158370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.158379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.158385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.158399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.158407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.158416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.158422Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.158432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.158442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.158447Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-09-25T16:20:43.158458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.158468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.158473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.158505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.158515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.158521Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.158537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.158547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.158553Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.158563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.158571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.158577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.158586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.158595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.158602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.158618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-09-25T16:20:43.158628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 8];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:216;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:5998984;index_size:0;meta:(()););(portion_id:214;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:5984840;index_size:0;meta:(()););; 2025-09-25T16:20:57.564592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=9deff790-9a2b11f0-966743ba-9041cd9;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:20:57.564609Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9deff790-9a2b11f0-966743ba-9041cd9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=9deff790-9a2b11f0-966743ba-9041cd9;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.565666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:20:57.566247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:20:57.566256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.653560Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:20:57.653597Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=5984840;count=649;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:20:57.748568Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:57.748610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:20:57.748624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7088450;count=1;packed=6021232; 2025-09-25T16:20:57.748644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=86964;count=1707; 2025-09-25T16:20:57.748653Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=168948;count=1708;size_of_meta=112; 2025-09-25T16:20:57.748665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=230436;count=854;size_of_portion=184; 2025-09-25T16:20:57.748754Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.748806Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:20:57.750091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.750522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 2025-09-25T16:20:57.751392Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18005056;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=78910360;raw_bytes=81118650;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:57.797019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.797040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:20:57.797054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=with_appended.cpp:65;portions=217,;task_id=9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.797153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::9deff790-9a2b11f0-966743ba-9041cd9; 2025-09-25T16:20:57.797171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:12006072;portions_count:217;); 2025-09-25T16:20:57.797179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:57.797200Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:57.797208Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=7; 2025-09-25T16:20:57.797226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815446056;tx_id=18446744073709551615;;current_snapshot_ts=1758817244747; 2025-09-25T16:20:57.797235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:57.797248Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:57.797255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:57.797275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.915000s; 2025-09-25T16:20:57.797285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9deff790-9a2b11f0-966743ba-9041cd9;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:57.797331Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 >> TColumnShardTestReadWrite::WriteReadModifications |82.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram >> Normalizers::EmptyTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-09-25T16:20:44.090004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.093296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.093332Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.093917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.093958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.093987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.094003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.094016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.094032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.094046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.094060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.094072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.094084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.094098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.094111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.094140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.099289Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.099358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.099366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.099414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.099449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.099463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.099469Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.099481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.099491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.099500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.099506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.099522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.099528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.099534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.099537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.099545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.099550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.099556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.099559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.099565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.099571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.099574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.099583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.099589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.099592Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.099610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.099616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.099619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.099661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.099668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.099671Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.099677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.099683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.099686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.099692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.099698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.099703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.099713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:44.099719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:216;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:5998984;index_size:0;meta:(()););(portion_id:214;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:5984840;index_size:0;meta:(()););; 2025-09-25T16:20:58.768053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:20:58.768065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.769109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:20:58.769629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:20:58.769636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.859267Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:20:58.859304Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=5984840;count=649;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:20:58.952556Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:58.952601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:20:58.952611Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7088450;count=1;packed=6021232; 2025-09-25T16:20:58.952625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=86964;count=1707; 2025-09-25T16:20:58.952631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=168948;count=1708;size_of_meta=112; 2025-09-25T16:20:58.952640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=230436;count=854;size_of_portion=184; 2025-09-25T16:20:58.952706Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.952765Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:20:58.953743Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.953988Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 2025-09-25T16:20:58.954518Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18005056;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=78910360;raw_bytes=81118650;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:58.999672Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.999700Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:20:58.999716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=with_appended.cpp:65;portions=217,;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.999826Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::9eaa7e26-9a2b11f0-9430569d-6d3bd886; 2025-09-25T16:20:58.999862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:12006072;portions_count:217;); 2025-09-25T16:20:58.999871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:58.999902Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:58.999914Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=7; 2025-09-25T16:20:58.999942Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815447004;tx_id=18446744073709551615;;current_snapshot_ts=1758817245696; 2025-09-25T16:20:58.999953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:58.999969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:58.999973Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:58.999989Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.915000s; 2025-09-25T16:20:58.999996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9eaa7e26-9a2b11f0-9430569d-6d3bd886;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:59.000033Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-09-25T16:20:43.246834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.252397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.252444Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.253300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.253352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.253390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.253413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.253433Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.253472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.253493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.253513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.253537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.253560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.253580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.253599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.253643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.260503Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.260573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.260583Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.260629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.260701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.260716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.260723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.260736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.260747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.260756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.260762Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.260784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.260794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.260803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.260809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.260855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.260865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.260875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.260880Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.260891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.260899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.260905Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.260915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.260924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.260930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.260959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.260969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.260974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.260990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.260999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.261005Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.261013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.261022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.261027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.261036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.261044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.261051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.261068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.261078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:215;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:6303784;index_size:0;meta:(()););(portion_id:213;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:6289496;index_size:0;meta:(()););; 2025-09-25T16:20:58.721022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:20:58.721034Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:58.721759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:20:58.722538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:20:58.722551Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:58.884287Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:20:58.884334Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6289496;count=682;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:20:58.974538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:58.974588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:20:58.974643Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=6303784; 2025-09-25T16:20:58.974665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=93210;count=1701; 2025-09-25T16:20:58.974671Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=174906;count=1702;size_of_meta=112; 2025-09-25T16:20:58.974683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=236178;count=851;size_of_portion=184; 2025-09-25T16:20:58.974763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:58.974828Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:20:58.976471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:58.976914Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 2025-09-25T16:20:58.977690Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=25223856;raw_bytes=29497600;count=4;records=300200} inactive {blob_bytes=70349968;raw_bytes=70280550;count=211;records=825200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:59.028556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:59.028579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:20:59.028592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=with_appended.cpp:65;portions=216,;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:59.028694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7; 2025-09-25T16:20:59.028713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:18920072;portions_count:216;); 2025-09-25T16:20:59.028723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:59.028748Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:59.028760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=6; 2025-09-25T16:20:59.028779Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815446149;tx_id=18446744073709551615;;current_snapshot_ts=1758817244856; 2025-09-25T16:20:59.028789Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:59.028802Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.028809Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.028850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.909000s; 2025-09-25T16:20:59.028861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9ea0c7d2-9a2b11f0-bf3b39ad-31cbbad7;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:59.028933Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] [GOOD] >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-09-25T16:20:54.941446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:54.946182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:54.946229Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:54.946891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-09-25T16:20:54.946940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-09-25T16:20:54.946951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.946977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:54.946990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:54.947006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:54.947020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:54.947032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:54.947048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:54.947061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:54.947073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:54.947087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:54.947126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:54.947147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:54.947165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:54.952281Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:54.952336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=14;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-09-25T16:20:54.952344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.952389Z 
node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-09-25T16:20:54.952414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.952425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.952432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.952481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=14; 2025-09-25T16:20:54.952489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-09-25T16:20:54.952496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-09-25T16:20:54.952505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-09-25T16:20:54.952516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:54.952524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-09-25T16:20:54.952530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-09-25T16:20:54.952542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:54.952557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:54.952563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:54.952566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-09-25T16:20:54.952573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:54.952581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:54.952587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:54.952590Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:54.952602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:54.952608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:54.952613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:54.952617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:54.952625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:54.952630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:54.952635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:54.952639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:54.952645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:54.952650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:54.952653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:54.952660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:54.952666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:54.952669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:54.952683Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=ab ... g.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=1; 2025-09-25T16:20:59.702496Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=6; 2025-09-25T16:20:59.702514Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=9; 2025-09-25T16:20:59.702523Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=5; 2025-09-25T16:20:59.702530Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=4; 2025-09-25T16:20:59.702534Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:20:59.702538Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=0; 2025-09-25T16:20:59.702541Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=53; 2025-09-25T16:20:59.702553Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=7; 2025-09-25T16:20:59.702565Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=8; 2025-09-25T16:20:59.702581Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=11; 2025-09-25T16:20:59.702590Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=5; 2025-09-25T16:20:59.702616Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=22; 2025-09-25T16:20:59.703762Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1139; 2025-09-25T16:20:59.703775Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-09-25T16:20:59.703781Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:20:59.703786Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:20:59.703799Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-09-25T16:20:59.703805Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:20:59.703818Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2025-09-25T16:20:59.703825Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-09-25T16:20:59.703835Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-09-25T16:20:59.703844Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=5; 2025-09-25T16:20:59.703851Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=3; 2025-09-25T16:20:59.703854Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2317; 2025-09-25T16:20:59.703877Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:59.703903Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:20:59.703912Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:20:59.703926Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:59.703931Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:20:59.703937Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:59.703954Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:59.703959Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=0; 2025-09-25T16:20:59.703972Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:59.703983Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.703988Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.704001Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:59.704677Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:20:59.704694Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:20:59.704700Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:20:59.704703Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:20:59.704707Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:59.704715Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:59.704719Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=0; 2025-09-25T16:20:59.704726Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:59.704731Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.704734Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:59.704742Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:59.754327Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 111 scanId: 0 version: {1758817258658:111} readable: {1758817258658:max} at tablet 9437184 2025-09-25T16:20:59.754385Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-09-25T16:20:59.754403Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1758817258658:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-09-25T16:20:59.754423Z node 2 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1758817258658:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:11;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> ReadOnlyVDisk::TestDiscover ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-09-25T16:20:59.048806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:59.054768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:59.054828Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:59.055700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:59.055770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:59.055816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:59.055839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:59.055860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:59.055889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:59.055910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:59.055932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:59.055951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:59.055981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.056013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:59.056041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:59.056097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:59.063633Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:59.063707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:59.063718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:59.063777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.063823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:59.063837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:59.063843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:59.063855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:59.063865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:59.063874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:59.063880Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:59.063902Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.063912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:59.063920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:59.063926Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:59.063939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:59.063947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:59.063956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:59.063961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:59.063971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:59.063980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:59.063985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:59.063996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:59.064005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:59.064011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:59.064041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:59.064050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:59.064055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:59.064071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:59.064080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.064085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.064093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:59.064103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:59.064108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:59.064117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:59.064125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:59.064132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:59.064148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:59.064157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tifier=;fline=actor.cpp:99;event=TEvTaskProcessedResult; 2025-09-25T16:21:00.444421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-09-25T16:21:00.444424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=1;fline=abstract.cpp:22;event=OnSourcePrepared;source_id=1;prepared=1; 2025-09-25T16:21:00.444428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=1;fline=abstract.cpp:30;event=finish_source;source_id=1; 2025-09-25T16:21:00.444431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=1;fline=abstract.cpp:30;event=finish_source;source_id=2; 2025-09-25T16:21:00.444441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=2;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{3,14},{4,15},];}};]};SF:0;PR:0;); 2025-09-25T16:21:00.444444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-09-25T16:21:00.444447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:00.444450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:00.444456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:99;event=TEvTaskProcessedResult; 2025-09-25T16:21:00.444458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-09-25T16:21:00.444462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=3;fline=abstract.cpp:22;event=OnSourcePrepared;source_id=3;prepared=1; 2025-09-25T16:21:00.444465Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=3;fline=abstract.cpp:30;event=finish_source;source_id=3; 2025-09-25T16:21:00.444468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=3;fline=abstract.cpp:30;event=finish_source;source_id=4; 2025-09-25T16:21:00.444477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.444522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:409:2421] finished for tablet 9437184 2025-09-25T16:21:00.444588Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:405:2417];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1504165,"name":"_full_task","f":1504165,"d_finished":0,"c":0,"l":1506446,"d":2281},"events":[{"name":"bootstrap","f":1504208,"d_finished":214,"c":1,"l":1504422,"d":214},{"a":1506414,"name":"ack","f":1506414,"d_finished":0,"c":0,"l":1506446,"d":32},{"a":1506412,"name":"processing","f":1504459,"d_finished":541,"c":5,"l":1506389,"d":575},{"name":"ProduceResults","f":1504348,"d_finished":152,"c":7,"l":1506434,"d":152},{"a":1506435,"name":"Finish","f":1506435,"d_finished":0,"c":0,"l":1506446,"d":11},{"name":"task_result","f":1504464,"d_finished":528,"c":5,"l":1506389,"d":528}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:405:2417];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:00.444623Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:405:2417];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.002}],"full":{"a":1504165,"name":"_full_task","f":1504165,"d_finished":0,"c":0,"l":1506505,"d":2340},"events":[{"name":"bootstrap","f":1504208,"d_finished":214,"c":1,"l":1504422,"d":214},{"a":1506414,"name":"ack","f":1506414,"d_finished":0,"c":0,"l":1506505,"d":91},{"a":1506412,"name":"processing","f":1504459,"d_finished":541,"c":5,"l":1506389,"d":634},{"name":"ProduceResults","f":1504348,"d_finished":152,"c":7,"l":1506434,"d":152},{"a":1506435,"name":"Finish","f":1506435,"d_finished":0,"c":0,"l":1506505,"d":70},{"name":"task_result","f":1504464,"d_finished":528,"c":5,"l":1506389,"d":528}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.444634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:00.442148Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2025-09-25T16:21:00.444639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:00.444661Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:409:2421];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-09-25T16:20:59.969791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:59.973199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:59.973245Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:59.973860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:59.973910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:59.973943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:59.973966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:59.973984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:59.974003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:59.974022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:59.974041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:59.974054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:59.974067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.974082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:59.974095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:59.974128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:59.979521Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:59.979598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:59.979607Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:59.979646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.979675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:59.979688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:59.979695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:59.979704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:59.979712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:59.979718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:59.979721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:59.979746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.979753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:59.979760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:59.979763Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:59.979771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:59.979777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:59.979782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:59.979785Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:59.979791Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:59.979797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:59.979800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:59.979807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:59.979813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:59.979816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:59.979834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:59.979840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:59.979843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:59.979853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:59.979859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.979862Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.979867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:59.979873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:59.979876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:59.979881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:59.979890Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:59.979896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:59.979909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:59.979917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 4;source_id=1;tablet_id=9437184;fline=abstract.cpp:30;execute=FetchOriginalData; 2025-09-25T16:21:00.614864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=1;tablet_id=9437184;source_id=1;tablet_id=9437184;fline=source.cpp:345;source_id=1; 2025-09-25T16:21:00.614870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=1;tablet_id=9437184;source_id=1;tablet_id=9437184;fline=source.cpp:345;source_id=1; 2025-09-25T16:21:00.614897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=1;tablet_id=9437184;source_id=1;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-09-25T16:21:00.614904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_id=1;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-09-25T16:21:00.614919Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-09-25T16:21:00.614944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-09-25T16:21:00.614953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-09-25T16:21:00.614958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2025-09-25T16:21:00.614997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2025-09-25T16:21:00.615010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=abstract.cpp:30;execute=Calculation; 2025-09-25T16:21:00.615069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=abstract.cpp:30;execute=Filter; 2025-09-25T16:21:00.615084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=4; 2025-09-25T16:21:00.615101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_id=1;tablet_id=9437184;fline=script_cursor.cpp:25;event=empty_result;scan_step_idx=5; 2025-09-25T16:21:00.615142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:99;event=TEvTaskProcessedResult; 2025-09-25T16:21:00.615149Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-09-25T16:21:00.615156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=1;fline=abstract.cpp:22;event=OnSourcePrepared;source_id=1;prepared=1; 2025-09-25T16:21:00.615161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_id=1;fline=abstract.cpp:30;event=finish_source;source_id=1; 2025-09-25T16:21:00.615183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615193Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615216Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.615224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:296:2308] finished for tablet 9437184 2025-09-25T16:21:00.615276Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:295:2307];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish","f_task_result","l_task_result"],"t":0}],"full":{"a":755228,"name":"_full_task","f":755228,"d_finished":0,"c":0,"l":756033,"d":805},"events":[{"name":"bootstrap","f":755270,"d_finished":169,"c":1,"l":755439,"d":169},{"a":756004,"name":"ack","f":756004,"d_finished":0,"c":0,"l":756033,"d":29},{"a":756003,"name":"processing","f":755462,"d_finished":206,"c":2,"l":755986,"d":236},{"name":"ProduceResults","f":755382,"d_finished":100,"c":4,"l":756025,"d":100},{"a":756026,"name":"Finish","f":756026,"d_finished":0,"c":0,"l":756033,"d":7},{"name":"task_result","f":755465,"d_finished":200,"c":2,"l":755986,"d":200}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:295:2307];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:00.615308Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:295:2307];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish","f_task_result","l_task_result"],"t":0}],"full":{"a":755228,"name":"_full_task","f":755228,"d_finished":0,"c":0,"l":756077,"d":849},"events":[{"name":"bootstrap","f":755270,"d_finished":169,"c":1,"l":755439,"d":169},{"a":756004,"name":"ack","f":756004,"d_finished":0,"c":0,"l":756077,"d":73},{"a":756003,"name":"processing","f":755462,"d_finished":206,"c":2,"l":755986,"d":280},{"name":"ProduceResults","f":755382,"d_finished":100,"c":4,"l":756025,"d":100},{"a":756026,"name":"Finish","f":756026,"d_finished":0,"c":0,"l":756077,"d":51},{"name":"task_result","f":755465,"d_finished":200,"c":2,"l":755986,"d":200}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.615318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:00.614324Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-09-25T16:21:00.615321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:00.615333Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:296:2308];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-09-25T16:20:57.731696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 
2025-09-25T16:20:57.737963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:57.738018Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:57.738914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:57.738971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:57.739019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:57.739059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:57.739080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:57.739102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:57.739124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:57.739146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:57.739168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:57.739189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.739211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:57.739232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:57.739284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:57.746435Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:57.746501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:57.746508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:57.746541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:57.746571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:57.746581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:57.746586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:57.746593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:57.746600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:57.746606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:57.746609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:57.746624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:57.746630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:57.746636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:57.746639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:57.746647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:57.746652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:57.746657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:57.746661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:57.746667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:57.746673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:57.746676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:57.746684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:57.746690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:57.746693Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:57.746713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:57.746720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:57.746723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:57.746733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:57.746739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.746742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.746749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:57.746754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:57.746757Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:57.746764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:57.746769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:57.746774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:57.746784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descriptio ... mn_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:21:00.795914Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-09-25T16:21:00.795921Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:00.795927Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:00.795989Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.796009Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796014Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:00.796029Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-09-25T16:21:00.796038Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-09-25T16:21:00.796073Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-09-25T16:21:00.796086Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796100Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796113Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796140Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.796152Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796162Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796167Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:464:2476] finished for tablet 9437184 2025-09-25T16:21:00.796220Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":3178925,"name":"_full_task","f":3178925,"d_finished":0,"c":0,"l":3180224,"d":1299},"events":[{"name":"bootstrap","f":3178971,"d_finished":165,"c":1,"l":3179136,"d":165},{"a":3180188,"name":"ack","f":3180037,"d_finished":130,"c":1,"l":3180167,"d":166},{"a":3180186,"name":"processing","f":3179165,"d_finished":337,"c":3,"l":3180168,"d":375},{"name":"ProduceResults","f":3179075,"d_finished":231,"c":6,"l":3180215,"d":231},{"a":3180216,"name":"Finish","f":3180216,"d_finished":0,"c":0,"l":3180224,"d":8},{"name":"task_result","f":3179168,"d_finished":197,"c":2,"l":3179981,"d":197}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796232Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:00.796272Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":3178925,"name":"_full_task","f":3178925,"d_finished":0,"c":0,"l":3180288,"d":1363},"events":[{"name":"bootstrap","f":3178971,"d_finished":165,"c":1,"l":3179136,"d":165},{"a":3180188,"name":"ack","f":3180037,"d_finished":130,"c":1,"l":3180167,"d":230},{"a":3180186,"name":"processing","f":3179165,"d_finished":337,"c":3,"l":3180168,"d":439},{"name":"ProduceResults","f":3179075,"d_finished":231,"c":6,"l":3180215,"d":231},{"a":3180216,"name":"Finish","f":3180216,"d_finished":0,"c":0,"l":3180288,"d":72},{"name":"task_result","f":3179168,"d_finished":197,"c":2,"l":3179981,"d":197}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.796285Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:00.794761Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-09-25T16:21:00.796290Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:00.796938Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> ReadOnlyVDisk::TestDiscover [GOOD] >> ReadOnlyVDisk::TestSync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] 
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:51.282679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:51.282697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:51.282701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:51.282705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:51.282709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:51.282712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:51.282718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:51.282729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:51.282814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:51.282869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:51.299725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:51.299758Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:51.299831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:51.303425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:51.303489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:51.303519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:51.304811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:51.304920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: 
Clear TempDirsState with owners number: 0 2025-09-25T16:19:51.305025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.305264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:51.306507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:51.306559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:51.306846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:51.306859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:51.306880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:51.306888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:51.306895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:51.306935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:51.308429Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:51.332865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:51.332928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.332976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:51.332983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:51.333036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-09-25T16:19:51.333087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:51.333717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.333757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:51.333805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.333814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:51.333819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:51.333824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:51.334234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.334247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:51.334252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:51.334597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.334608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:51.334614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:51.334620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:51.335340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:51.335785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:51.335836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:51.336036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:51.336061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... hemeshard, txId 1003 2025-09-25T16:21:00.406454Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-09-25T16:21:00.406459Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-09-25T16:21:00.406462Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 1003, at schemeshard: 72057594046678944 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:13400 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2CCA2B1B-637C-479C-89FF-E508DD60972A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 REQUEST: GET /data_01.csv HTTP/1.1 HEADERS: Host: localhost:13400 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8B188CCD-F551-4AD0-AF64-06C32CD283A8 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_01.csv / 23 2025-09-25T16:21:00.406877Z node 211 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-09-25T16:21:00.406892Z node 211 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:21:00.406932Z node 211 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 1, size# 34 2025-09-25T16:21:00.406945Z node 211 :DATASHARD_RESTORE DEBUG: import_s3.cpp:656: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-09-25T16:21:00.406949Z node 211 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:21:00.406962Z node 211 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 0, error# Value parse error: '(/ q"a1"' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. 
on line: (/ q"a1","value1", writtenBytes# 0, writtenRows# 0 2025-09-25T16:21:00.406973Z node 211 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:1003] Upload rows: count# 0, size# 8 2025-09-25T16:21:00.409802Z node 211 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:21:00.409819Z node 211 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:21:00.409825Z node 211 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:1003] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:21:00.414453Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 353 RawX2: 906238101790 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:21:00.414480Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-09-25T16:21:00.414508Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 353 RawX2: 906238101790 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:21:00.414525Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 353 RawX2: 906238101790 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. 
on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-09-25T16:21:00.414544Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:00.414580Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:00.415475Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 906238101789 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:21:00.415493Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-09-25T16:21:00.415514Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 906238101789 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:21:00.415530Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 351 RawX2: 906238101789 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:21:00.415542Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:00.415548Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:21:00.415555Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:21:00.415561Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-09-25T16:21:00.415567Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-09-25T16:21:00.415599Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a 
serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:00.415703Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:21:00.416109Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:21:00.416230Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-09-25T16:21:00.416241Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-09-25T16:21:00.416259Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:21:00.416264Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:21:00.416270Z node 211 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-09-25T16:21:00.416275Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:21:00.416280Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-09-25T16:21:00.416294Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [211:470:2431] message: TxId: 1003 2025-09-25T16:21:00.416303Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-09-25T16:21:00.416310Z node 211 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-09-25T16:21:00.416316Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1003:0 2025-09-25T16:21:00.416346Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-09-25T16:21:00.417175Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-09-25T16:21:00.417188Z node 211 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [211:517:2476] TestWaitNotification: OK eventTxId 1003 >> ReadOnlyVDisk::TestStorageLoad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 4821817965225680316 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 
131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-09-25T16:21:01.334695Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-09-25T16:21:01.337701Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-09-25T16:21:01.340669Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:21:01.341391Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-09-25T16:21:01.343251Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-09-25T16:21:01.343855Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-09-25T16:21:01.344527Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-09-25T16:21:01.345097Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: 
TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-09-25T16:21:01.716022Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.716070Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.716101Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.716296Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [8e2972341ab7470c] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# 
[82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-09-25T16:21:01.716594Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.716689Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.716956Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-09-25T16:21:01.717255Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.717382Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.717503Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-09-25T16:21:01.717699Z 3 00h05m30.160512s :BS_SKELETON ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.717955Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.718034Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-09-25T16:21:01.718167Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.718176Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.718331Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-09-25T16:21:01.718630Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.718641Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.718847Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { 
OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-09-25T16:21:01.719039Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.719062Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.719068Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-09-25T16:21:01.719367Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.719394Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2025-09-25T16:21:01.719409Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " 
ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-09-25T16:21:01.719874Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.719905Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.719918Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-09-25T16:21:01.720252Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2025-09-25T16:21:01.720265Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2025-09-25T16:21:01.720279Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-09-25T16:21:01.721511Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:707] 2025-09-25T16:21:01.721561Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5341:714] 2025-09-25T16:21:01.721575Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5348:721] 2025-09-25T16:21:01.721703Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: 
[57d0fa8c5c220317] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-09-25T16:21:01.721737Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5341:714] 2025-09-25T16:21:01.721750Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5348:721] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-09-25T16:20:44.317504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.322330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.322393Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.323286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.323353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.323399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.323429Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.323450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.323474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.323497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.323517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.323538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.323558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.323578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.323598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.323654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.332081Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.332184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.332196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.332247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.332290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.332304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.332311Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.332324Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.332335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.332342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.332346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.332362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.332368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.332374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.332377Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.332386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.332391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.332397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.332400Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.332407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.332413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.332417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-09-25T16:20:44.332429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.332439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.332445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.332476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.332486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.332491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.332507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.332516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.332521Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.332530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.332539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.332544Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.332553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.332563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.332569Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.332583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-09-25T16:20:44.332592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ; 2025-09-25T16:21:00.953042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:00.953048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:00.953432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.953471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953478Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:00.953495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2025-09-25T16:21:00.953505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=10;batch_columns=timestamp; 2025-09-25T16:21:00.953565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:3731:5737];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-09-25T16:21:00.953582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.953713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [1:3735:5741] finished for tablet 9437184 2025-09-25T16:21:00.953827Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[1:3731:5737];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.116},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.117}],"full":{"a":16630441,"name":"_full_task","f":16630441,"d_finished":0,"c":0,"l":16747489,"d":117048},"events":[{"name":"bootstrap","f":16630580,"d_finished":1075,"c":1,"l":16631655,"d":1075},{"a":16747434,"name":"ack","f":16747167,"d_finished":190,"c":1,"l":16747357,"d":245},{"a":16747432,"name":"processing","f":16632031,"d_finished":12015,"c":111,"l":16747358,"d":12072},{"name":"ProduceResults","f":16631341,"d_finished":3151,"c":114,"l":16747470,"d":3151},{"a":16747471,"name":"Finish","f":16747471,"d_finished":0,"c":0,"l":16747489,"d":18},{"name":"task_result","f":16632037,"d_finished":11559,"c":110,"l":16746793,"d":11559}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[1:3731:5737];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:00.953916Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[1:3731:5737];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.116},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.117}],"full":{"a":16630441,"name":"_full_task","f":16630441,"d_finished":0,"c":0,"l":16747592,"d":117151},"events":[{"name":"bootstrap","f":16630580,"d_finished":1075,"c":1,"l":16631655,"d":1075},{"a":16747434,"name":"ack","f":16747167,"d_finished":190,"c":1,"l":16747357,"d":348},{"a":16747432,"name":"processing","f":16632031,"d_finished":12015,"c":111,"l":16747358,"d":12175},{"name":"ProduceResults","f":16631341,"d_finished":3151,"c":114,"l":16747470,"d":3151},{"a":16747471,"name":"Finish","f":16747471,"d_finished":0,"c":0,"l":16747592,"d":121},{"name":"task_result","f":16632037,"d_finished":11559,"c":110,"l":16746793,"d":11559}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.953933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:00.836186Z;index_granules=0;index_portions=109;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2744;inserted_portions_bytes=252288;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=255032;selected_rows=0; 2025-09-25T16:21:00.953940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:00.953971Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3735:5741];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 7421609824807225349 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-09-25T16:21:01.224648Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:21:01.271729Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:21:01.272045Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-09-25T16:21:01.311472Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2025-09-25T16:21:01.311857Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2025-09-25T16:21:01.312030Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2025-09-25T16:21:01.312102Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [e3c99301b96e8116] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 
VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key 
[1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2025-09-25T16:20:43.153788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.158932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.158979Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.159855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.159915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.159952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.159973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.159994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.160018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.160039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.160059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.160080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.160100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.160121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.160141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.160185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.166864Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.166945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.166957Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.167014Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.167054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.167069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.167074Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.167086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.167096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.167104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.167108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.167130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.167139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.167148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.167153Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.167166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.167176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.167184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.167189Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.167198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.167206Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.167210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.167221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.167229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.167234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.167261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.167270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.167275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.167291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.167300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.167305Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.167313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.167321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.167326Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.167334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.167343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.167349Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.167365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.167374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... s=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:21:00.835461Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-09-25T16:21:00.835471Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:00.835481Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:00.835780Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.835821Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.835828Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:00.835843Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-09-25T16:21:00.835859Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-09-25T16:21:00.835917Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:317:2318];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-09-25T16:21:00.835939Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.835959Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.835971Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.835999Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:00.836011Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.836022Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.836028Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:319:2319] finished for tablet 9437184 2025-09-25T16:21:00.836138Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:317:2318];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.151}],"full":{"a":15691695,"name":"_full_task","f":15691695,"d_finished":0,"c":0,"l":17843408,"d":2151713},"events":[{"name":"bootstrap","f":15691779,"d_finished":242,"c":1,"l":15692021,"d":242},{"a":17843359,"name":"ack","f":15700110,"d_finished":1344173,"c":421,"l":17843338,"d":1344222},{"a":17843357,"name":"processing","f":15692069,"d_finished":2128527,"c":843,"l":17843338,"d":2128578},{"name":"ProduceResults","f":15691929,"d_finished":1664387,"c":1266,"l":17843387,"d":1664387},{"a":17843388,"name":"Finish","f":17843388,"d_finished":0,"c":0,"l":17843408,"d":20},{"name":"task_result","f":15692076,"d_finished":782384,"c":422,"l":17842849,"d":782384}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.836157Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:317:2318];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:00.836206Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:317:2318];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":2.151}],"full":{"a":15691695,"name":"_full_task","f":15691695,"d_finished":0,"c":0,"l":17843526,"d":2151831},"events":[{"name":"bootstrap","f":15691779,"d_finished":242,"c":1,"l":15692021,"d":242},{"a":17843359,"name":"ack","f":15700110,"d_finished":1344173,"c":421,"l":17843338,"d":1344340},{"a":17843357,"name":"processing","f":15692069,"d_finished":2128527,"c":843,"l":17843338,"d":2128696},{"name":"ProduceResults","f":15691929,"d_finished":1664387,"c":1266,"l":17843387,"d":1664387},{"a":17843388,"name":"Finish","f":17843388,"d_finished":0,"c":0,"l":17843526,"d":138},{"name":"task_result","f":15692076,"d_finished":782384,"c":422,"l":17842849,"d":782384}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:00.836226Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:20:58.684181Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-09-25T16:21:00.836234Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:00.836267Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:319:2319];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability |82.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |82.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> ReadOnlyVDisk::TestGarbageCollect >> ReadOnlyVDisk::TestReads >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-09-25T16:20:47.573608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:47.578860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:47.578928Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:47.579813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:47.579883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:47.579932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:47.579959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:47.579979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:47.580001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:47.580022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:47.580042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:47.580062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:47.580084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.580105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:47.580125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:47.580176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:47.586975Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:47.587048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:47.587059Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:47.587101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.587137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:47.587151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:47.587157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:47.587169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:47.587178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:47.587186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:47.587192Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:47.587213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.587222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:47.587232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:47.587236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:47.587250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:47.587258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:47.587265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:47.587271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:47.587280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:47.587289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:47.587294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:47.587305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:47.587313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:47.587318Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:47.587348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:47.587357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:47.587362Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:47.587378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:47.587386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.587391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.587400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:47.587408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:47.587413Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:47.587422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:47.587430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:47.587436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:47.587452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:47.587462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:18;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9296];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9312];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9304];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:8592];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:8280];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:8288];;;;switched=(portion_id:215;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:6373584;index_size:0;meta:(()););(portion_id:213;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:6356384;index_size:0;meta:(()););; 2025-09-25T16:21:02.497484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4465:6457];task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:21:02.497501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4465:6457];task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.498144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:21:02.498905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:21:02.498923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.609539Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:21:02.609580Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6356384;count=689;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:21:02.720905Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:21:02.720981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=9999,9999,9999,9999,; 2025-09-25T16:21:02.720999Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7433340;count=1;packed=6373584; 2025-09-25T16:21:02.721030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=62;sum=93010;count=1701; 2025-09-25T16:21:02.721039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=174;sum=174706;count=1702;size_of_meta=112; 2025-09-25T16:21:02.721054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=246;sum=235978;count=851;size_of_portion=184; 2025-09-25T16:21:02.721160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.721240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:21:02.723174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4465:6457];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.723645Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 543 2025-09-25T16:21:02.724794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=25498800;raw_bytes=29753360;count=4;records=300200} inactive {blob_bytes=71325608;raw_bytes=70983540;count=211;records=825200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:02.787491Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.787518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:21:02.787535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=with_appended.cpp:65;portions=216,;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.787697Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a0dd1582-9a2b11f0-b8927c8f-17ccc96a; 2025-09-25T16:21:02.787730Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:19125216;portions_count:216;); 2025-09-25T16:21:02.787739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:02.787783Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:02.787792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=6; 2025-09-25T16:21:02.787810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815450493;tx_id=18446744073709551615;;current_snapshot_ts=1758817249184; 2025-09-25T16:21:02.787820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:02.787833Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:02.787840Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:02.787883Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.910000s; 2025-09-25T16:21:02.787896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a0dd1582-9a2b11f0-b8927c8f-17ccc96a;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:02.787967Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 543 |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |82.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |82.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> ReadOnlyVDisk::TestGarbageCollect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-09-25T16:20:42.439671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:42.445232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:42.445295Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:42.446202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:42.446262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:42.446307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:42.446332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:42.446353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:42.446392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:42.446416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:42.446437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:42.446459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:42.446479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.446502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:42.446524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:42.446574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:42.452493Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:42.452574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:42.452582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:42.452619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:42.452651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:42.452661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:42.452666Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:42.452674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:42.452682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:42.452688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:42.452691Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:42.452704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:42.452711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:42.452716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
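[editor note] The surrounding TTxUpdateSchema entries (continuing below) walk a chain of 12 registered normalizers in order: each one is initialized, runs (here reporting "0 chunks found"), is marked finished, and the transaction switches to the next. The following is a minimal, hypothetical C++ sketch of that sequential pattern; the interface and class names are illustrative only and are not the actual NKikimr normalizer API.

// Hypothetical sketch of a sequential "normalizer chain" as implied by the
// normalizer_init / normalizer_finished / normalizer_switched log events.
// Names are illustrative; this is not the real columnshard code.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    // Returns the number of records that had to be repaired; 0 means "no changes".
    virtual size_t Run() = 0;
};

struct TGranulesNormalizer : INormalizer {
    std::string Name() const override { return "Granules"; }
    size_t Run() override { return 0; }  // "0 chunks found" in the log above
};

// Runs strictly in registration order, logging each transition, mirroring
// the normalizer_init -> normalizer_finished -> normalizer_switched sequence.
void RunNormalizerChain(const std::vector<std::unique_ptr<INormalizer>>& chain) {
    for (const auto& n : chain) {
        std::cout << "normalizer_init: " << n->Name() << "\n";
        const size_t fixed = n->Run();
        std::cout << "normalizer_finished: " << n->Name() << " (fixed=" << fixed << ")\n";
    }
}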
2025-09-25T16:20:42.452719Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:42.452728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:42.452733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:42.452738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:42.452741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:42.452748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:42.452754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:42.452757Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:42.452764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:42.452770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:42.452773Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:42.452792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:42.452798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:42.452801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:42.452811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:42.452818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.452839Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.452849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:42.452858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:42.452862Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:42.452872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:42.452878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:42.452883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:42.452893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:42.452899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
le;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1960334;count=7053;size_of_portion=184; 2025-09-25T16:21:03.084205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=61950; 2025-09-25T16:21:03.084222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=4; 2025-09-25T16:21:03.084417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=186; 2025-09-25T16:21:03.084426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=62187; 2025-09-25T16:21:03.084432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=62198; 2025-09-25T16:21:03.084444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:21:03.084498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=48; 2025-09-25T16:21:03.084504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62314; 2025-09-25T16:21:03.084544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=32; 2025-09-25T16:21:03.084564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2025-09-25T16:21:03.084628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=57; 2025-09-25T16:21:03.084675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=40; 2025-09-25T16:21:03.087985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3289; 2025-09-25T16:21:03.092434Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4419; 2025-09-25T16:21:03.092456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-09-25T16:21:03.092463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:21:03.092469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:03.092482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 2025-09-25T16:21:03.092488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:03.092499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2025-09-25T16:21:03.092505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-09-25T16:21:03.092516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:03.092537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:21:03.092554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-09-25T16:21:03.092560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=71251; 2025-09-25T16:21:03.092598Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18920080;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=82943248;raw_bytes=85019450;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:03.092625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:03.092635Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:03.092650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:03.092659Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:03.092695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:03.092717Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:03.092725Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:03.092739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815446913;tx_id=18446744073709551615;;current_snapshot_ts=1758817244267; 2025-09-25T16:21:03.092749Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:03.092761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.092767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.092808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:03.093818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:03.093897Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:03.093902Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
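[editor note] The cleanup entries above show the background maintenance loop deciding there is nothing to do: StartCleanup is bounded by a snapshot barrier (StartCleanupStop with plan_step/current_snapshot_ts) and by the max_portions/max_chunks limits, and TTL actualization is deferred because the waiting interval has not elapsed, ending in skip_reason=no_changes. A hedged sketch of that gating logic follows; all names and types are invented for illustration, only the 1000/500000 limits are taken from the max_portions/max_chunks fields in the log.

// Hedged sketch of the "background=cleanup;skip_reason=no_changes" gating seen above.
// Not the real columnshard implementation.
#include <cstdint>
#include <optional>
#include <vector>

struct TPortion { uint64_t PlanStep = 0; uint32_t Chunks = 0; };
struct TCleanupPlan { std::vector<TPortion> ToDrop; };

std::optional<TCleanupPlan> StartCleanup(const std::vector<TPortion>& portions,
                                         uint64_t barrierPlanStep,
                                         size_t maxPortions = 1000,
                                         size_t maxChunks = 500000) {
    TCleanupPlan plan;
    size_t chunks = 0;
    for (const auto& p : portions) {
        if (p.PlanStep >= barrierPlanStep) break;      // still protected by the snapshot barrier
        if (plan.ToDrop.size() >= maxPortions) break;  // bound the work done per pass
        if (chunks + p.Chunks > maxChunks) break;
        plan.ToDrop.push_back(p);
        chunks += p.Chunks;
    }
    if (plan.ToDrop.empty()) {
        return std::nullopt;  // corresponds to skip_reason=no_changes in the log
    }
    return plan;
}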
2025-09-25T16:21:03.093907Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:03.093913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:03.093929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:03.093936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:03.093946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815446913;tx_id=18446744073709551615;;current_snapshot_ts=1758817244267; 2025-09-25T16:21:03.093952Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:03.093958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.093962Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.093975Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:03.093983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> ReadOnlyVDisk::TestSync [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 >> ReadOnlyVDisk::TestReads [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-09-25T16:20:48.700005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:48.706200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:48.706257Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:48.707148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:48.707205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:48.707250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:48.707302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:48.707325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:48.707349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:48.707372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:48.707395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:48.707418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:48.707440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:48.707464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:48.707486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:48.707540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:48.714683Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:48.714752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:48.714764Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:48.714815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:48.714852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:48.714865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:48.714872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:48.714884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:48.714894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:48.714903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:48.714908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:48.714929Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:48.714938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:48.714947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:48.714952Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:48.714964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:48.714972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:48.714980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-09-25T16:20:48.714985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:48.714995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:48.715003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:48.715008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:48.715019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:48.715027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:48.715032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:48.715059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:48.715068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:48.715073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:48.715088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:48.715097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:48.715102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:48.715110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:48.715119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:48.715124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-09-25T16:20:48.715132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:48.715141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:48.715147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:48.715162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:48.715170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0
:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:215;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:6303784;index_size:0;meta:(()););(portion_id:213;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:6289496;index_size:0;meta:(()););; 2025-09-25T16:21:03.616780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:21:03.616799Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:03.617568Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:21:03.618380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:21:03.618395Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:03.790133Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:21:03.790183Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6289496;count=682;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:21:03.933457Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:21:03.933527Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:21:03.933539Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=6303784; 2025-09-25T16:21:03.933559Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=93210;count=1701; 2025-09-25T16:21:03.933565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=174906;count=1702;size_of_meta=112; 2025-09-25T16:21:03.933575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=236178;count=851;size_of_portion=184; 2025-09-25T16:21:03.933644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:03.933721Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:21:03.934747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:03.935009Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 2025-09-25T16:21:03.937423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=25223864;raw_bytes=29497600;count=4;records=300200} inactive {blob_bytes=70349968;raw_bytes=70280550;count=211;records=825200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:03.999983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:04.000009Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:21:04.000024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=with_appended.cpp:65;portions=216,;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:04.000156Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a17ad2f4-9a2b11f0-816c70a3-a9712377; 2025-09-25T16:21:04.000181Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:18920080;portions_count:216;); 2025-09-25T16:21:04.000191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:04.000219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:04.000228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=6; 2025-09-25T16:21:04.000247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815451600;tx_id=18446744073709551615;;current_snapshot_ts=1758817250307; 2025-09-25T16:21:04.000257Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:04.000271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.000278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.000314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.909000s; 2025-09-25T16:21:04.000326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17ad2f4-9a2b11f0-816c70a3-a9712377;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:04.000384Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> StoragePool::TestDistributionExactMin [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] |82.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 8990062936560470507 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-09-25T16:21:03.420017Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.420110Z 2 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-09-25T16:21:03.420170Z 8 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-09-25T16:21:03.420193Z 7 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-09-25T16:21:03.420239Z 6 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-09-25T16:21:03.420309Z 5 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2025-09-25T16:21:03.420479Z 3 00h01m40.100000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 2} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-09-25T16:21:03.422771Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-09-25T16:21:03.587090Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.587214Z 8 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 
VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:5] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-09-25T16:21:03.587247Z 7 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:4] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-09-25T16:21:03.587275Z 6 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:3] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-09-25T16:21:03.587321Z 5 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:2] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-09-25T16:21:03.587350Z 4 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:1] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2025-09-25T16:21:03.587405Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:03.587637Z 3 00h03m20.160512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:6] barrier# {Soft# {Gen# 1 Step# 3} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-09-25T16:21:03.676455Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.676504Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:21:03.737732Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.737854Z 8 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:03.737921Z 7 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:03.737951Z 6 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:03.737972Z 5 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:03.738021Z 4 00h05m00.200000s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:03.738079Z 2 00h05m00.200000s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:03.738321Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:03.738376Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [715b3485595fd47d] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-09-25T16:21:03.838277Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.838332Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:03.838346Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-09-25T16:21:03.977323Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:03.977379Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:03.977389Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:03.977398Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] === Putting VDisk #4 to read-only === Setting 
VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-09-25T16:21:04.033715Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:04.033761Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:04.033772Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:04.033782Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.033792Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-09-25T16:21:04.085005Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:04.085058Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:04.085068Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:04.085075Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.085083Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.085091Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-09-25T16:21:04.129066Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:705] 2025-09-25T16:21:04.129127Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:04.129140Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:04.129150Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.129161Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.129172Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 2025-09-25T16:21:04.129184Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-09-25T16:21:04.177473Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:712] 2025-09-25T16:21:04.177504Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:04.177516Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.177526Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.177537Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 2025-09-25T16:21:04.177548Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-09-25T16:21:04.232664Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:719] 2025-09-25T16:21:04.232693Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.232705Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.232717Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 2025-09-25T16:21:04.232729Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-09-25T16:21:04.292706Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5354:726] 2025-09-25T16:21:04.292735Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.292746Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 2025-09-25T16:21:04.292757Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-09-25T16:21:04.362037Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5361:733] 2025-09-25T16:21:04.362069Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 2025-09-25T16:21:04.362081Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-09-25T16:21:04.529731Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5368:740] 
2025-09-25T16:21:04.529753Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-09-25T16:21:04.598670Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5375:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:21:04.813374Z 1 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:04.813470Z 8 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:04.813496Z 7 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:04.813522Z 6 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:04.813568Z 5 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2025-09-25T16:21:04.813634Z 4 00h16m30.512048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-09-25T16:20:43.458970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.463171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.463220Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.463896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.463944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.463971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.463985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.463998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.464016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.464038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.464052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.464065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.464078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.464091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.464103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.464132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.476731Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.476866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.476877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.476929Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.476972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.476986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.476993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.477005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.477016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.477026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.477032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.477054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.477064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.477073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.477078Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.477090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.477098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.477107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.477111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.477122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.477130Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.477134Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.477145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.477154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.477158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.477190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.477201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.477206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.477223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.477233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.477239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.477247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.477255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.477260Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.477269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.477278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.477284Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.477300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.477309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... anule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1960334;count=7053;size_of_portion=184; 2025-09-25T16:21:04.978652Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9302; 2025-09-25T16:21:04.978667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:04.978872Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=197; 2025-09-25T16:21:04.978880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9558; 2025-09-25T16:21:04.978886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9576; 2025-09-25T16:21:04.978896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:04.978956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=52; 2025-09-25T16:21:04.978962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9729; 2025-09-25T16:21:04.979005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=31; 2025-09-25T16:21:04.979027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:04.979097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=62; 2025-09-25T16:21:04.979145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=40; 2025-09-25T16:21:04.985720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6552; 2025-09-25T16:21:04.991332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5560; 2025-09-25T16:21:04.991374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-09-25T16:21:04.991383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:04.991393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-09-25T16:21:04.991413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=14; 2025-09-25T16:21:04.991422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:04.991440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-09-25T16:21:04.991449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:04.991465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=9; 2025-09-25T16:21:04.991487Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=15; 2025-09-25T16:21:04.991505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-09-25T16:21:04.991511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23680; 2025-09-25T16:21:04.991565Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18920080;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=82943248;raw_bytes=85019450;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:04.991606Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:04.991617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:04.991635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:04.991644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:04.991693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:04.991724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:04.991732Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:04.991747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815447934;tx_id=18446744073709551615;;current_snapshot_ts=1758817245288; 2025-09-25T16:21:04.991772Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:04.991786Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.991793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.991825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:04.993178Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:04.993269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:04.993276Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:04.993280Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:04.993288Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:04.993313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:04.993320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:04.993332Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815447934;tx_id=18446744073709551615;;current_snapshot_ts=1758817245288; 2025-09-25T16:21:04.993340Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:04.993351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.993357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:04.993380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:04.993390Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6990:8620];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] |82.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 4474982159427697325 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting 
VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === 
Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-09-25T16:20:47.093458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:47.097380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:47.097428Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:47.098042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:47.098083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:47.098115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:47.098130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:47.098143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:47.098159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:47.098173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:47.098186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:47.098199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:47.098212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.098225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:47.098238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:47.098269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:47.103734Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:47.103806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:47.103816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:47.103856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.103897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:47.103910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:47.103916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:47.103927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:47.103936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:47.103943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:47.103948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:47.103968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.103976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:47.103985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:47.103990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:47.104001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:47.104009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:47.104018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:47.104022Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:47.104033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:47.104040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:47.104045Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:47.104055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:47.104064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:47.104068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:47.104096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:47.104105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:47.104110Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:47.104126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:47.104134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.104139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.104148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:47.104156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:47.104161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:47.104169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:47.104178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:47.104184Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:47.104199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:47.104207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:215;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:6303784;index_size:0;meta:(()););(portion_id:213;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:6289496;index_size:0;meta:(()););; 2025-09-25T16:21:03.585069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:21:03.585089Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4421:6414];task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.586195Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:21:03.586983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:21:03.586998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.761904Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:21:03.761951Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6289496;count=682;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:21:03.931665Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:21:03.931731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:21:03.931747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=6303784; 2025-09-25T16:21:03.931783Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=93210;count=1701; 2025-09-25T16:21:03.931792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=174906;count=1702;size_of_meta=112; 2025-09-25T16:21:03.931806Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=236178;count=851;size_of_portion=184; 2025-09-25T16:21:03.931919Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.931986Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:21:03.933822Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4421:6414];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.934286Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 2025-09-25T16:21:03.935273Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=25223856;raw_bytes=29497600;count=4;records=300200} inactive {blob_bytes=70349968;raw_bytes=70280550;count=211;records=825200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:03.985382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.985406Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:21:03.985418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=with_appended.cpp:65;portions=216,;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.985538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a17a3bd2-9a2b11f0-9879d646-3b38fd10; 2025-09-25T16:21:03.985566Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:18920072;portions_count:216;); 2025-09-25T16:21:03.985575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:03.985604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:03.985614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=6; 2025-09-25T16:21:03.985632Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815449997;tx_id=18446744073709551615;;current_snapshot_ts=1758817248704; 2025-09-25T16:21:03.985641Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:03.985655Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.985663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:03.985683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.909000s; 2025-09-25T16:21:03.985695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a17a3bd2-9a2b11f0-9879d646-3b38fd10;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:03.985748Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 536 >> THiveTest::TestCreateAndDeleteTabletWithStoragePools |82.7%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 16325798003447011760 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-09-25T16:21:02.260148Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8837:948] 2025-09-25T16:21:02.260233Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8844:955] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-09-25T16:21:02.549807Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8851:962] 2025-09-25T16:21:02.549835Z 2 00h06m00.210512s :BS_SKELETON ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8844:955] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-09-25T16:21:03.282630Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8865:976] 2025-09-25T16:21:03.282647Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8858:969] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-09-25T16:21:03.641473Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8872:983] 2025-09-25T16:21:03.641501Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8865:976] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-09-25T16:21:04.116693Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8879:990] 2025-09-25T16:21:04.116723Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8872:983] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 
for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-09-25T16:21:04.542730Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8879:990] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 15637311886278660223 2025-09-25T16:21:02.856498Z 1 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3487480:2] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-09-25T16:21:02.856623Z 5 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3487480:6] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-09-25T16:21:02.856641Z 4 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3487480:5] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-09-25T16:21:02.856656Z 2 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3487480:3] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-09-25T16:21:02.856678Z 8 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3487480:1] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2025-09-25T16:21:02.856695Z 3 00h01m08.010512s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob 
beyond the barrier id# [1:2:11:0:11:3487480:4] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-09-25T16:21:03.017041Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.017464Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.018109Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.019116Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.019135Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.022219Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.024292Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.041920Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.045494Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.057515Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.061062Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.077677Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.077747Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.081622Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.092186Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.094853Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.104362Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.106559Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.119242Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.127597Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.127851Z 1 
00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.132310Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.134844Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.144623Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.148390Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.150865Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.153743Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.155744Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.157805Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.160999Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.164971Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.165015Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.170524Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.186088Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.191318Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.194849Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.210025Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.213511Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.220481Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.220530Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.227063Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.229029Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.244393Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.247956Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.254678Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.258139Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.261001Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.261041Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.263350Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.265657Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.267786Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.273446Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.281634Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.286545Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.288871Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.290998Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.292853Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.295926Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.300348Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.302864Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.321523Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.324655Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:708] 2025-09-25T16:21:03.334667Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDI ... 
60s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.555985Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.564270Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.564371Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.571324Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.579461Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.583163Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.605529Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.609634Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.618302Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.622595Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.622638Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.627294Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.634332Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.643679Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.663072Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.668380Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.675877Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.695562Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.700932Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.715220Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.715268Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable 
in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.722642Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.736625Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.748839Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.752904Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.766496Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.774150Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.780630Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.780738Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.789738Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.794373Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.801565Z 8 00h20m58.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.809810Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.816552Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.820171Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.825173Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.825208Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.833238Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.837403Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.844482Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.849657Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.867699Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.872220Z 8 00h21m00.312560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.885855Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.885899Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.890051Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.894304Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.898227Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.902118Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.906589Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.914259Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.917758Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.936477Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.942124Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.951818Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.963507Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.970131Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.983268Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:05.994780Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:06.003550Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:06.003647Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5383:757] 2025-09-25T16:21:06.007108Z 1 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:3] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.007240Z 4 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:6] barrier# 
{Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.007309Z 3 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:5] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.007329Z 7 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:1] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.007345Z 2 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:4] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.007433Z 5 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2025-09-25T16:21:06.009000Z 5 00h21m02.412560s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:3850832:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:35.886810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:35.886854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:35.886860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:35.886866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:35.886874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:35.886878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:35.886888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:35.886903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:35.887016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:35.887080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:35.910035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:35.910071Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:35.910172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:35.914486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:35.914589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:35.914630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:35.915953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:35.916022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:35.916117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:35.916339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:35.917354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:35.917406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:35.917663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:35.917677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:35.917698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:35.917706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-09-25T16:19:35.917713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:35.917752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:35.919122Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:35.942612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:35.942696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.942761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:35.942770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:35.942826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:35.942843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:35.943781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:35.943844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:35.943904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.943916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:35.943923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:35.943929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change 
state for txid 1:0 2 -> 3 2025-09-25T16:19:35.944496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.944511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:35.944518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:35.944946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.944959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:35.944966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:35.944974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:35.945733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:35.946269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:35.946312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:35.946553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:35.946581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
ntToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:07.140661Z node 92 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:21:07.140804Z node 92 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 167us result status StatusSuccess 2025-09-25T16:21:07.141100Z node 92 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:40.224803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.224839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.224845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.224850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.224856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.224861Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.224870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.224879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.224974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.225023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.245537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:40.245572Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.245672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.249967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.250054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.250088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.251803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.251883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.251992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.252207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.253548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.253600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.253864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.253875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.253896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.253904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.253911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.253951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:40.255431Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.279941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.280008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.280056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.280063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.280111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.280125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.280751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.280801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.280855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.280865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.280871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.280876Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.281304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.281664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.281679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.281685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.282378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.282786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.282816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.282999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.283023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... 
ompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 
MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:07.103851Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2633] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:21:07.103894Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][93:739:2633] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:21:07.103939Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2633] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817267085890 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817267085890 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817267085890 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:21:07.105341Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:797:2633] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:21:07.105370Z node 93 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409548:2][93:739:2633] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:21:07.279889Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:21:07.280029Z node 93 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 168us result status StatusSuccess 2025-09-25T16:21:07.280301Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> Normalizers::CleanUnusedTablesNormalizer >> StoragePool::TestDistributionRandomMin7p [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-09-25T16:20:45.844805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:45.850565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:45.850637Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:45.851541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:45.851657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:45.851710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:45.851739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:45.851762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:45.851785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:45.851809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:45.851832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:45.851857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:45.851879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.851903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:45.851926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:45.851985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:45.859074Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:45.859132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:45.859140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:45.859185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.859219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:45.859229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:45.859234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:45.859242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:45.859249Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:45.859255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:45.859258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:45.859274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.859280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:45.859286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:45.859290Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:45.859298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:45.859303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:45.859310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:45.859313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:45.859319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:45.859325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:45.859328Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:45.859335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:45.859342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:45.859345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:45.859366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:45.859372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:45.859375Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:45.859385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:45.859392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.859395Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.859401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:45.859407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:45.859410Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:45.859416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:45.859422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:45.859427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:45.859437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:45.859443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=1960134;count=7053;size_of_portion=184; 2025-09-25T16:21:07.001077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=10434; 2025-09-25T16:21:07.001093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:07.001299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=197; 2025-09-25T16:21:07.001308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10691; 2025-09-25T16:21:07.001314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10705; 2025-09-25T16:21:07.001323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:07.001378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=49; 2025-09-25T16:21:07.001385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10845; 2025-09-25T16:21:07.001422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=29; 2025-09-25T16:21:07.001444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:07.001511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=60; 2025-09-25T16:21:07.001554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=36; 2025-09-25T16:21:07.006005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4430; 2025-09-25T16:21:07.011621Z node 
1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5572; 2025-09-25T16:21:07.011658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:07.011667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:21:07.011674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:07.011695Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=14; 2025-09-25T16:21:07.011703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:07.011721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:07.011729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:07.011743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:07.011764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:21:07.011794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=24; 2025-09-25T16:21:07.011803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22616; 2025-09-25T16:21:07.011855Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=19125224;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=84055576;raw_bytes=85850220;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:07.011899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:07.011909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:07.011928Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:07.011937Z node 
1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:07.011982Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:07.012014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:07.012024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:07.012040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815450336;tx_id=18446744073709551615;;current_snapshot_ts=1758817247674; 2025-09-25T16:21:07.012050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:07.012064Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:07.012071Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:07.012100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:07.013421Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:07.013528Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:07.013534Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:07.013538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:07.013545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:07.013571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:07.013578Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:07.013591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815450336;tx_id=18446744073709551615;;current_snapshot_ts=1758817247674; 2025-09-25T16:21:07.013600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:07.013610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:07.013616Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:07.013638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:07.013646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] |82.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting |82.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> 
TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-09-25T16:20:52.154019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:52.159503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:52.159555Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:52.160363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:52.160417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:52.160454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:52.160478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:52.160497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:52.160517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:52.160537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:52.160556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:52.160575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:52.160594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.160615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:52.160633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:52.160679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:52.168644Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:52.168715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:52.168727Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:52.168778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.168815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:52.168846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:52.168853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:52.168865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:52.168875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:52.168884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:52.168889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:52.168911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:52.168922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:52.168931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:52.168936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:52.168948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:52.168956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:52.168965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:52.168970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:52.168980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:52.168989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:52.168994Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:52.169005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:52.169014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:52.169019Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:52.169050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:52.169059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:52.169064Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:52.169081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:52.169090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.169095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:52.169104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:52.169112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:52.169118Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:52.169126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:52.169135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:52.169141Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:52.169156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:52.169165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:216;path_id:1000000185;records_count:75000;schema_version:1;level:1;;column_size:5998984;index_size:0;meta:(()););(portion_id:214;path_id:1000000185;records_count:75000;schema_version:1;level:2;;column_size:5984840;index_size:0;meta:(()););; 2025-09-25T16:21:07.758261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-09-25T16:21:07.758280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_execution;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:4417:6409];task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:07.758848Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-09-25T16:21:07.759370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-09-25T16:21:07.759378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:07.821655Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=tx_draft.cpp:16;event=draft_completed; 2025-09-25T16:21:07.821708Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=5984840;count=649;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-09-25T16:21:07.964877Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:21:07.964938Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-09-25T16:21:07.964950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7088450;count=1;packed=6021224; 2025-09-25T16:21:07.964969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=86964;count=1707; 2025-09-25T16:21:07.964978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=168948;count=1708;size_of_meta=112; 2025-09-25T16:21:07.964989Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=230436;count=854;size_of_portion=184; 2025-09-25T16:21:07.965084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:07.965158Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-09-25T16:21:07.966571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4417:6409];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:07.966986Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 2025-09-25T16:21:07.967862Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18005048;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=78910360;raw_bytes=81118650;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:08.050021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:08.050054Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-09-25T16:21:08.050071Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=with_appended.cpp:65;portions=217,;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:08.050206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4; 2025-09-25T16:21:08.050235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:1000000185;path_id:1000000185;size:12006064;portions_count:217;); 2025-09-25T16:21:08.050246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:08.050274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:08.050289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=7; 2025-09-25T16:21:08.050318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815455067;tx_id=18446744073709551615;;current_snapshot_ts=1758817253759; 2025-09-25T16:21:08.050329Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:08.050349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:08.050356Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:08.050383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.915000s; 2025-09-25T16:21:08.050394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=a3ea9286-9a2b11f0-8f10cf91-cbe7f0d4;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:08.050473Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 503 |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |82.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasure4Plus2Block [GOOD] >> Compression::WriteRAW >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 |82.7%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Normalizers::CleanUnusedTablesNormalizer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-09-25T16:20:47.272550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:47.278550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:47.278605Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:47.279470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:47.279535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:47.279576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:47.279602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:47.279624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:47.279662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:47.279684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:47.279707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:47.279729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:47.279751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.279773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:47.279795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:47.279849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:47.287103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:47.287166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:47.287176Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:47.287218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.287255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:47.287269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:47.287276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:47.287288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:47.287298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:47.287307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:47.287313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:47.287336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.287346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:47.287355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:47.287361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:47.287374Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:47.287382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:47.287391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:47.287396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:47.287406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:47.287415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:47.287420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:47.287429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:47.287439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:47.287444Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:47.287478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:47.287487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:47.287493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:47.287510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:47.287519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.287524Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.287533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:47.287542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:47.287547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:47.287556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:47.287565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:47.287571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:47.287588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:47.287596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... anule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=1960134;count=7053;size_of_portion=184; 2025-09-25T16:21:09.702499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9391; 2025-09-25T16:21:09.702512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:09.702702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=182; 2025-09-25T16:21:09.702710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9625; 2025-09-25T16:21:09.702719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9644; 2025-09-25T16:21:09.702727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:21:09.702784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=50; 2025-09-25T16:21:09.702791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9786; 2025-09-25T16:21:09.702824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=25; 2025-09-25T16:21:09.702844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:09.702911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=61; 2025-09-25T16:21:09.702954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=36; 2025-09-25T16:21:09.707083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4113; 2025-09-25T16:21:09.712445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5326; 2025-09-25T16:21:09.712481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:09.712490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:21:09.712498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-09-25T16:21:09.712517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=13; 2025-09-25T16:21:09.712526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:09.712545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:09.712554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:09.712570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=9; 2025-09-25T16:21:09.712594Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:21:09.712613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=13; 2025-09-25T16:21:09.712620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21017; 2025-09-25T16:21:09.712672Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=19125224;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=84055576;raw_bytes=85850220;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:09.712715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:09.712726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:09.712743Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:09.712752Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:09.712795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:09.712842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:09.712850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:09.712865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815451762;tx_id=18446744073709551615;;current_snapshot_ts=1758817249100; 2025-09-25T16:21:09.712874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:09.712889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:09.712895Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:09.712923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:09.714129Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:7028:8658];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:09.714199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:09.714205Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:21:09.714213Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:09.714219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:09.714239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:09.714246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:09.714257Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815451762;tx_id=18446744073709551615;;current_snapshot_ts=1758817249100; 2025-09-25T16:21:09.714265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:09.714273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:09.714279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:09.714296Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:09.714305Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7028:8658];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; |82.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> TBoardSubscriber2DCTest::NotAvailableByShutdown |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] >> TBoardSubscriberTest::SimpleSubscriber >> ReadSessionImplTest::SuccessfulInit >> TImportWithRebootsTests::ShouldSucceedOnSingleView [GOOD] >> THiveTest::TestCreateExternalTablet [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnSingleTopic >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> TBoardSubscriberTest::NotAvailableByShutdown >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-09-25T16:20:49.409127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:49.412478Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:49.412515Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:49.413073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:49.413109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:49.413132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:49.413149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:49.413162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:49.413176Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:49.413189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:49.413202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:49.413215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:49.413227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.413241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:49.413254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:49.413282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:49.418018Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:49.418072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:49.418082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:49.418126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.418159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:49.418171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:49.418177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:49.418185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-09-25T16:20:49.418191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:49.418197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:49.418200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:49.418214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.418220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:49.418226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:49.418229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:49.418237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:49.418242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:49.418247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:49.418251Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:49.418257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:49.418264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:49.418267Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:49.418274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:49.418280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:49.418283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:49.418301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:49.418307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:49.418310Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:49.418321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:49.418328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.418331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.418336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:49.418342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:49.418345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:49.418350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:49.418356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:49.418360Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:49.418370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:49.418376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1957904;count=7068;size_of_portion=184; 2025-09-25T16:21:12.465075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=11200; 2025-09-25T16:21:12.465088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:12.465303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=206; 2025-09-25T16:21:12.465309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=11463; 2025-09-25T16:21:12.465317Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=11480; 2025-09-25T16:21:12.465327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:12.465392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=59; 2025-09-25T16:21:12.465399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11655; 2025-09-25T16:21:12.465440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=32; 2025-09-25T16:21:12.465463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:12.465555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=84; 2025-09-25T16:21:12.465606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=43; 2025-09-25T16:21:12.470308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4686; 2025-09-25T16:21:12.476249Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5907; 2025-09-25T16:21:12.476278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:12.476292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:12.476301Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:12.476322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=14; 2025-09-25T16:21:12.476330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:12.476351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=14; 2025-09-25T16:21:12.476360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:12.476375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:12.476396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:21:12.476415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-09-25T16:21:12.476425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28671; 2025-09-25T16:21:12.476473Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12006072;raw_bytes=14157900;count=2;records=150200} inactive {blob_bytes=90894184;raw_bytes=95257550;count=215;records=1125200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:12.476523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:12.476535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:12.476560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:12.476569Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:12.476618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:12.476644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:12.476652Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:12.476672Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815453934;tx_id=18446744073709551615;;current_snapshot_ts=1758817251231; 2025-09-25T16:21:12.476685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:12.476699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:12.476709Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:12.476740Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:12.478140Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:12.478207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:12.478213Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:12.478217Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:12.478225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:12.478242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:12.478249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:12.478261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815453934;tx_id=18446744073709551615;;current_snapshot_ts=1758817251231; 2025-09-25T16:21:12.478270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:12.478278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:12.478284Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:12.478301Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:12.478310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2025-09-25T16:20:57.728844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:57.732853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:57.732899Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:57.733640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-09-25T16:20:57.733682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:57.733717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:57.733739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:57.733761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:57.733781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:57.733798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:57.733817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:57.733836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:57.733854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:57.733872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.733891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:57.733935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:57.733954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:57.740189Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:57.740255Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-09-25T16:20:57.740269Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-09-25T16:20:57.740339Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-09-25T16:20:57.740369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-09-25T16:20:57.740383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-09-25T16:20:57.740391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-09-25T16:20:57.740411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:57.740432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:57.740442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:57.740447Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-09-25T16:20:57.740458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:57.740467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:57.740475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:57.740481Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:57.740503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:57.740511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:57.740520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:57.740525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:57.740538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:57.740549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:57.740557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:57.740562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:57.740572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:57.740581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:57.740586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:57.740594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:57.740603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:57.740608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:57.740631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:57.740639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:57.740644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:57.740658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:57.740667Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.740672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:57.740680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:57.740688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksM ... ;SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-09-25T16:21:12.404878Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-09-25T16:21:12.404882Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:12.404888Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:12.405026Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:12.405040Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405046Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:12.405057Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-09-25T16:21:12.405065Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-09-25T16:21:12.405086Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:327:2328];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-09-25T16:21:12.405099Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405121Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405132Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405148Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:12.405159Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405171Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405177Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [2:329:2329] finished for tablet 9437184 2025-09-25T16:21:12.405251Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[2:327:2328];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.017},{"events":["l_task_result"],"t":1.387},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.388}],"full":{"a":13401201,"name":"_full_task","f":13401201,"d_finished":0,"c":0,"l":14789236,"d":1388035},"events":[{"name":"bootstrap","f":13401247,"d_finished":216,"c":1,"l":13401463,"d":216},{"a":14789193,"name":"ack","f":13418966,"d_finished":748324,"c":421,"l":14789182,"d":748367},{"a":14789192,"name":"processing","f":13401509,"d_finished":1359788,"c":843,"l":14789183,"d":1359832},{"name":"ProduceResults","f":13401385,"d_finished":1226178,"c":1266,"l":14789221,"d":1226178},{"a":14789222,"name":"Finish","f":14789222,"d_finished":0,"c":0,"l":14789236,"d":14},{"name":"task_result","f":13401514,"d_finished":609600,"c":422,"l":14788938,"d":609600}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405262Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[2:327:2328];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:12.405304Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[2:327:2328];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.017},{"events":["l_task_result"],"t":1.387},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.388}],"full":{"a":13401201,"name":"_full_task","f":13401201,"d_finished":0,"c":0,"l":14789315,"d":1388114},"events":[{"name":"bootstrap","f":13401247,"d_finished":216,"c":1,"l":13401463,"d":216},{"a":14789193,"name":"ack","f":13418966,"d_finished":748324,"c":421,"l":14789182,"d":748446},{"a":14789192,"name":"processing","f":13401509,"d_finished":1359788,"c":843,"l":14789183,"d":1359911},{"name":"ProduceResults","f":13401385,"d_finished":1226178,"c":1266,"l":14789221,"d":1226178},{"a":14789222,"name":"Finish","f":14789222,"d_finished":0,"c":0,"l":14789315,"d":93},{"name":"task_result","f":13401514,"d_finished":609600,"c":422,"l":14788938,"d":609600}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:12.405319Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:11.017035Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-09-25T16:21:12.405325Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:12.405348Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:329:2329];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-09-25T16:21:13.467313Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.467322Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.467327Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.472861Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-09-25T16:21:13.473051Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-09-25T16:21:13.473070Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.477184Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.477190Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.477194Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.484868Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:13.485014Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-09-25T16:21:13.485028Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.485314Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.485319Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.485323Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.485433Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-09-25T16:21:13.485451Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.485454Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.485500Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-09-25T16:21:13.497185Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.497192Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.497196Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.497292Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-09-25T16:21:13.497305Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.497310Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.497322Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-09-25T16:21:13.501156Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-09-25T16:21:13.501164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-09-25T16:21:13.501168Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.516883Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:13.525759Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:13.530629Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-09-25T16:21:13.532903Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-09-25T16:21:13.533030Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-09-25T16:21:13.533446Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-09-25T16:21:13.533501Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:13.533507Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-09-25T16:21:13.533512Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-09-25T16:21:13.533516Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-09-25T16:21:13.533521Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-09-25T16:21:13.533524Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-09-25T16:21:13.533528Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-09-25T16:21:13.533531Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-09-25T16:21:13.533537Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-09-25T16:21:13.533541Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-09-25T16:21:13.533545Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-09-25T16:21:13.533548Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-09-25T16:21:13.533553Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-09-25T16:21:13.533556Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-09-25T16:21:13.533560Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-09-25T16:21:13.533564Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-09-25T16:21:13.533571Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-09-25T16:21:13.533574Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-09-25T16:21:13.533578Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-09-25T16:21:13.533581Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-09-25T16:21:13.533585Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-09-25T16:21:13.533588Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-09-25T16:21:13.533591Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-09-25T16:21:13.533595Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-09-25T16:21:13.533598Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-09-25T16:21:13.533602Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-09-25T16:21:13.533606Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-09-25T16:21:13.533609Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-09-25T16:21:13.533613Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-09-25T16:21:13.533616Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-09-25T16:21:13.533620Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-09-25T16:21:13.533623Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-09-25T16:21:13.533640Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-09-25T16:21:13.533644Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-09-25T16:21:13.533647Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-09-25T16:21:13.533652Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-09-25T16:21:13.533657Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-09-25T16:21:13.533661Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-09-25T16:21:13.533664Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-09-25T16:21:13.533667Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-09-25T16:21:13.533671Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-09-25T16:21:13.533675Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-09-25T16:21:13.533678Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-09-25T16:21:13.533682Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-09-25T16:21:13.533686Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-09-25T16:21:13.533689Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-09-25T16:21:13.533692Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-09-25T16:21:13.533696Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-09-25T16:21:13.533700Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-09-25T16:21:13.533703Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-09-25T16:21:13.533714Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-09-25T16:21:13.533833Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-09-25T16:21:13.533881Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-09-25T16:21:13.533885Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-09-25T16:21:13.533889Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-09-25T16:21:13.533893Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-09-25T16:21:13.533897Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-09-25T16:21:13.533900Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-09-25T16:21:13.533904Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-09-25T16:21:13.533907Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-09-25T16:21:13.533912Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-09-25T16:21:13.533915Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-09-25T16:21:13.533918Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-09-25T16:21:13.533921Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-09-25T16:21:13.533925Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-09-25T16:21:13.533928Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-09-25T16:21:13.533932Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-09-25T16:21:13.533935Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-09-25T16:21:13.533941Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-09-25T16:21:13.533945Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-09-25T16:21:13.533948Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-09-25T16:21:13.533951Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-09-25T16:21:13.533955Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-09-25T16:21:13.533958Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-09-25T16:21:13.533962Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-09-25T16:21:13.533965Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-09-25T16:21:13.533969Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-09-25T16:21:13.533972Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-09-25T16:21:13.533976Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-09-25T16:21:13.533979Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-09-25T16:21:13.533982Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-09-25T16:21:13.533986Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-09-25T16:21:13.533989Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-09-25T16:21:13.533993Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-09-25T16:21:13.534001Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-09-25T16:21:13.534006Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-09-25T16:21:13.534009Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-09-25T16:21:13.534013Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-09-25T16:21:13.534016Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-09-25T16:21:13.534020Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-09-25T16:21:13.534023Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-09-25T16:21:13.534026Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-09-25T16:21:13.534030Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-09-25T16:21:13.534033Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-09-25T16:21:13.534037Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-09-25T16:21:13.534040Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-09-25T16:21:13.534044Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-09-25T16:21:13.534047Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-09-25T16:21:13.534051Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-09-25T16:21:13.534054Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-09-25T16:21:13.534057Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-09-25T16:21:13.534061Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-09-25T16:21:13.534066Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-09-25T16:21:13.534437Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-09-25T16:21:13.534977Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.534981Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.534984Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.544869Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-09-25T16:21:13.552879Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:13.556870Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.560863Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-09-25T16:21:13.661187Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.661614Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-09-25T16:21:13.661642Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:13.661649Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-09-25T16:21:13.661675Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-09-25T16:21:13.872874Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-09-25T16:21:13.976002Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-09-25T16:21:13.976863Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-09-25T16:21:13.976932Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-09-25T16:21:13.977324Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.977329Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:13.977332Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:13.982321Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:13.982556Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:13.999310Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:14.000996Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-09-25T16:21:14.140909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:14.144891Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-09-25T16:21:14.144921Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:14.144930Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-09-25T16:21:14.144961Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-09-25T16:21:14.145001Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-09-25T16:21:14.148929Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-09-25T16:21:14.150180Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-09-25T16:21:14.150235Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD]
|82.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log}
|82.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD]
>> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
|82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|82.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
>> TBoardSubscriber2DCTest::ManySubscribersManyPublisher
>> TBoardSubscriber2DCTest::ReconnectReplica
|82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain
|82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain
|82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|82.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
>> TBoardSubscriber2DCTest::SimpleSubscriber
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
Test command err: 2025-09-25T16:20:39.541279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:39.545230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:39.545288Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:39.545962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:39.546017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:39.546064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841:
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:39.546088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:39.546103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:39.546121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:39.546135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:39.546149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:39.546166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:39.546189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.546210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:39.546232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:39.546271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:39.556812Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:39.556929Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:39.556941Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:39.556995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.557045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:39.557061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:39.557068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:39.557081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:39.557093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:39.557103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:39.557108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:39.557130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:39.557141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:39.557150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:39.557156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:39.557170Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:39.557179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:39.557189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:39.557194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:39.557207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:39.557216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-09-25T16:20:39.557222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:39.557234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:39.557243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:39.557249Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:39.557281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:39.557292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:39.557297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:39.557315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:39.557325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.557331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:39.557340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:39.557350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:39.557355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:39.557365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:39.557375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:39.557382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2025-09-25T16:20:39.557399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:39.557408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d; Cleanup old portions: 1 2 3 5 6 8 10 7 12 9 11 14 16 13 18 15 20 17 22 19 24 21 23 26 28 25 30 27 32 29 31 34 36 33 38 35 37 40 39 42 41 44 43 46 2025-09-25T16:20:53.195375Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:53.195384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:289:2297];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d; 2025-09-25T16:20:53.195435Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[223] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-09-25T16:20:53.195798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:289:2297];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d; 2025-09-25T16:20:53.196083Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=129593352;raw_bytes=131330514;count=21;records=1575000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=265471768;raw_bytes=291227308;count=44;records=3225002} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:53.207526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d; 2025-09-25T16:20:53.207554Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-09-25T16:20:53.207719Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::9b8515e4-9a2b11f0-8748f7fe-e532994d; 2025-09-25T16:20:53.207753Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:53.207769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-09-25T16:20:53.207787Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:53.207797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=0; 2025-09-25T16:20:53.207815Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:53.207828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:53.207835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:53.207859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.551000s; 2025-09-25T16:20:53.207870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9b8515e4-9a2b11f0-8748f7fe-e532994d;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:53.207918Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:31:3:0:6171112:0] 2025-09-25T16:20:53.207929Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:17:4:0:4848592:0] 2025-09-25T16:20:53.207936Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:1:3:0:6171112:0] 2025-09-25T16:20:53.207943Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:4848592:0] 2025-09-25T16:20:53.207950Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6171112:0] 2025-09-25T16:20:53.207956Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:42:2:0:6171112:0] 2025-09-25T16:20:53.207963Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:35:4:0:6171112:0] 2025-09-25T16:20:53.207970Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:40:3:0:4848592:0] 2025-09-25T16:20:53.207977Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:6171112:0] 2025-09-25T16:20:53.207984Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:10:3:0:6171112:0] 2025-09-25T16:20:53.207992Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:33:2:0:4848592:0] 2025-09-25T16:20:53.207999Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:23:4:0:1792:0] 2025-09-25T16:20:53.208005Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-09-25T16:20:53.208025Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:44:4:0:4848592:0] 2025-09-25T16:20:53.208032Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:21:2:0:4848592:0] 2025-09-25T16:20:53.208038Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:34:3:0:6171112:0] 2025-09-25T16:20:53.208045Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:25:3:0:4848592:0] 2025-09-25T16:20:53.208052Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:32:4:0:4848592:0] 2025-09-25T16:20:53.208059Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:38:4:0:6171112:0] 2025-09-25T16:20:53.208066Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-09-25T16:20:53.208072Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2025-09-25T16:20:53.208084Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6171112:0] 2025-09-25T16:20:53.208091Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2025-09-25T16:20:53.208098Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:28:3:0:4848592:0] 2025-09-25T16:20:53.208105Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:14:4:0:6171112:0] 2025-09-25T16:20:53.208112Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:4848592:0] 2025-09-25T16:20:53.208118Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2025-09-25T16:20:53.208125Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6171112:0] 2025-09-25T16:20:53.208131Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:39:2:0:6171112:0] 2025-09-25T16:20:53.208138Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:4848592:0] 2025-09-25T16:20:53.208145Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:4848592:0] 2025-09-25T16:20:53.208152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:26:4:0:6171112:0] 2025-09-25T16:20:53.208158Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:36:2:0:4848592:0] 2025-09-25T16:20:53.208165Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:30:2:0:6171112:0] 2025-09-25T16:20:53.208171Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:27:2:0:6171112:0] 2025-09-25T16:20:53.208178Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6171112:0] 2025-09-25T16:20:53.208185Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:37:3:0:4848592:0] 2025-09-25T16:20:53.208193Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 
9437184 Delete Blob DS:0:[9437184:3:29:4:0:4848592:0] 2025-09-25T16:20:53.208200Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:4848592:0] 2025-09-25T16:20:53.208207Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:4848592:0] 2025-09-25T16:20:53.208213Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:22:3:0:6171112:0] 2025-09-25T16:20:53.208220Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:24:2:0:4848592:0] 2025-09-25T16:20:53.208226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:41:4:0:4848592:0] 2025-09-25T16:20:53.208233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:4848592:0] GC for channel 4 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 22 Cleanups happened: 1 Old portions: 1 2 3 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 46 Cleaned up portions: 1 2 3 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 46 >> TBoardSubscriber2DCTest::DropByDisconnect >> TBoardSubscriberTest::DropByDisconnect |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TBoardSubscriberTest::ManySubscribersManyPublisher |82.8%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TBoardSubscriberTest::ReconnectReplica >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |82.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher 
[GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |82.8%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> TSchemeShardSysViewTest::EmptyName |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> TSchemeShardSysViewTest::EmptyName [GOOD] >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews |82.8%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysViewTest::DropSysView >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews |82.8%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateSameSysView >> TSchemeShardSysViewTest::DropSysView [GOOD] >> TSchemeShardSysViewTest::CreateSysView >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-09-25T16:20:54.205308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:54.209890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:54.209932Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:54.210552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:54.210598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:54.210625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:54.210642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:54.210656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:54.210669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:54.210682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:54.210695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:54.210708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:54.210721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:54.210734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:54.210747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:54.210777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:54.215366Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:54.215409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:54.215417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:54.215447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:54.215474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:54.215486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:54.215492Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:54.215504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:54.215514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:54.215523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:54.215529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:54.215543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:54.215549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:54.215554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:54.215557Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:54.215566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:54.215572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:54.215577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:54.215580Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:54.215587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:54.215593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:54.215596Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-09-25T16:20:54.215604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:54.215612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:54.215617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:54.215646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:54.215656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:54.215661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:54.215677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:54.215683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:54.215686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:54.215705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:54.215714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:54.215718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:54.215727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:54.215736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:54.215742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:54.215759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-09-25T16:20:54.215768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1960334;count=7053;size_of_portion=184; 2025-09-25T16:21:17.897491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=11007; 2025-09-25T16:21:17.897507Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=3; 2025-09-25T16:21:17.897707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=190; 2025-09-25T16:21:17.897718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=11259; 2025-09-25T16:21:17.897724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=11275; 2025-09-25T16:21:17.897735Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:17.897794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2025-09-25T16:21:17.897801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11426; 2025-09-25T16:21:17.897845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=34; 2025-09-25T16:21:17.897868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2025-09-25T16:21:17.897933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=57; 2025-09-25T16:21:17.897983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=42; 2025-09-25T16:21:17.902300Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4300; 2025-09-25T16:21:17.908010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5669; 2025-09-25T16:21:17.908058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:17.908069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:17.908078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:17.908100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=15; 2025-09-25T16:21:17.908109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:17.908129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-09-25T16:21:17.908138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:17.908153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:17.908176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=15; 2025-09-25T16:21:17.908206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=16; 2025-09-25T16:21:17.908215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23266; 2025-09-25T16:21:17.908272Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18920080;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=82943248;raw_bytes=85019450;count=213;records=975200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:17.908320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:17.908334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:17.908356Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:17.908364Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:17.908406Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:17.908437Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:17.908446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:17.908463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815458678;tx_id=18446744073709551615;;current_snapshot_ts=1758817256032; 2025-09-25T16:21:17.908474Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:17.908486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:17.908493Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:17.908524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:17.909844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:17.909933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:17.909941Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:17.909946Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:17.909954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:17.909978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:17.909986Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:17.909999Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815458678;tx_id=18446744073709551615;;current_snapshot_ts=1758817256032; 2025-09-25T16:21:17.910008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:17.910018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:17.910025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:17.910049Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:17.910059Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:6988:8618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:18.866912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:18.866940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:18.866946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:18.866952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:18.866959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:18.866963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:18.866972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:18.866985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:18.867094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:18.867166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:18.901825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:18.901848Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:18.909164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:18.909208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:18.909254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:18.910809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:18.910881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:18.910999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:18.911104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:18.915686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:18.915765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:18.916168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:18.916184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:18.916221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:18.916231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:18.916237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:18.916272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:18.998841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:18.999278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 683: Change state for txid 1:0 2 -> 3 2025-09-25T16:21:19.289150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.289176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:21:19.289186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:21:19.297196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.297221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.297229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:21:19.297238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:21:19.297284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:21:19.303302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:21:19.303351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000036 2025-09-25T16:21:19.303447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000036, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:19.303478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000036 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:19.303487Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:21:19.303568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:21:19.303579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:21:19.303614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:21:19.303631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:21:19.308292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:19.308312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:19.308373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.308379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-09-25T16:21:19.308454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.308467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-09-25T16:21:19.308485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:21:19.308491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:21:19.308497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-09-25T16:21:19.308500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:21:19.308506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-09-25T16:21:19.308512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-09-25T16:21:19.308518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-09-25T16:21:19.308522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 1:0 2025-09-25T16:21:19.308544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-09-25T16:21:19.308550Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-09-25T16:21:19.308555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-09-25T16:21:19.308690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:21:19.308704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-09-25T16:21:19.308709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-09-25T16:21:19.308714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-09-25T16:21:19.308720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-09-25T16:21:19.308737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-09-25T16:21:19.313755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-09-25T16:21:19.313896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-09-25T16:21:19.314054Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:664:2653] Bootstrap 2025-09-25T16:21:19.314275Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:664:2653] Become StateWork (SchemeCache [1:669:2658]) 2025-09-25T16:21:19.315081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:21:19.315126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0 2025-09-25T16:21:19.315136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0, sysViewDescription: Name: "" Type: EPartitionStats 2025-09-25T16:21:19.315157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149, at schemeshard: 72057594046678944 2025-09-25T16:21:19.315344Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# 
[1:664:2653] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:21:19.316090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:19.316144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/ 2025-09-25T16:21:19.316224Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:21:19.316271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:21:19.316278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:21:19.316335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:21:19.316371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:21:19.316377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:679:2668] TestWaitNotification: OK eventTxId 101 >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] >> TSchemeShardSysViewTest::CreateExistingSysView >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:19.643253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:19.643285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.643291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:19.643297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:19.643304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:19.643309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:19.643321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.643336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:19.643464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:19.643538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:19.661667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:19.661697Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:19.665615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:19.665659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:19.665708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:19.666946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:19.667010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:19.667133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:19.667230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:19.668351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.668409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:19.668733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:19.668743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.668777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:19.668787Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:19.668794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:19.668820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.726166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.726652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Board DescribePath, at schemeshard: 72057594046678944, txId: 281474976720657, path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2025-09-25T16:21:19.880855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.880858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:758:2733], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 2 2025-09-25T16:21:19.880862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:758:2733], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 37 2025-09-25T16:21:19.880871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.880876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720657:0 ProgressState 2025-09-25T16:21:19.880889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720657:0 progress is 1/1 2025-09-25T16:21:19.880892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-09-25T16:21:19.880898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720657:0 progress is 1/1 2025-09-25T16:21:19.880901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-09-25T16:21:19.880904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-09-25T16:21:19.880908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-09-25T16:21:19.880911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720657:0 2025-09-25T16:21:19.880914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 281474976720657:0 2025-09-25T16:21:19.880925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 2 2025-09-25T16:21:19.880930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976720657, publications: 2, subscribers: 1 2025-09-25T16:21:19.880933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 2], 39 
2025-09-25T16:21:19.880936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 37], 2 2025-09-25T16:21:19.881302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.881317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.881321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720657 2025-09-25T16:21:19.881325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-09-25T16:21:19.881329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 35 2025-09-25T16:21:19.881439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.881446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.881449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720657 2025-09-25T16:21:19.881451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 37], version: 2 2025-09-25T16:21:19.881456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 1 2025-09-25T16:21:19.881464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720657, subscribers: 1 2025-09-25T16:21:19.881467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:764:2739] 2025-09-25T16:21:19.881942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.882004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2025-09-25T16:21:19.882033Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:764:2739] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/ds_pdisks' 2025-09-25T16:21:19.882037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:764:2739] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:706:2692] sender: [1:792:2058] recipient: [1:15:2062] 2025-09-25T16:21:19.957264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:19.957350Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 92us result status StatusSuccess 2025-09-25T16:21:19.957449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:19.957570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:19.957590Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 24us result status StatusSuccess 2025-09-25T16:21:19.957644Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976720657 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 37 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:19.986381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:19.986404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.986410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:19.986415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:19.986422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:19.986428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:19.986438Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.986452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:19.986569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:19.986640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:20.005175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:20.005195Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:20.012367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:20.012401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:20.012447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:20.014053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:20.014118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:20.014234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:20.014320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:20.015212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.015269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.015558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.015570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.015593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.015602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.015611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.015634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.059261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.059676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Finished: true CreateTxId: 281474976710674 CreateStep: 5000032 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 35 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710690 CreateStep: 5000019 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 27 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710682 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000017 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710679 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000024 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.298385Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:664:2653] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-09-25T16:21:20.298754Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-09-25T16:21:20.298912Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.298952Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 46us result status StatusSuccess 2025-09-25T16:21:20.299028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.299088Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.299107Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 21us result status StatusSuccess 2025-09-25T16:21:20.299150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.299195Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.299214Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 21us result status StatusSuccess 2025-09-25T16:21:20.299257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710688 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 35 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TSchemeShardSysViewTest::CreateSysView [GOOD] >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:19.888767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:19.888794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.888800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:19.888805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:19.888814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:19.888819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:19.888848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:19.888862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:19.888988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:19.889059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:19.906333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:19.906359Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:19.910110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:19.910148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:19.910192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:19.911466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:19.911530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:19.911646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:19.911744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:19.912603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.912658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:19.912993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:19.913009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:19.913045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:19.913056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:19.913062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:19.913091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:19.952350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-09-25T16:21:19.952578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:19.952795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
16:21:20.089773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-09-25T16:21:20.089776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-09-25T16:21:20.089779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-09-25T16:21:20.089782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2025-09-25T16:21:20.089786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2025-09-25T16:21:20.089789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2025-09-25T16:21:20.089792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-09-25T16:21:20.089795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-09-25T16:21:20.089797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-09-25T16:21:20.089800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-09-25T16:21:20.089803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-09-25T16:21:20.089806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-09-25T16:21:20.089809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-09-25T16:21:20.089812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-09-25T16:21:20.089815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-09-25T16:21:20.089818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-09-25T16:21:20.089820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-09-25T16:21:20.089823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-09-25T16:21:20.089826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-09-25T16:21:20.089829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-09-25T16:21:20.089832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-09-25T16:21:20.089834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-09-25T16:21:20.089837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-09-25T16:21:20.089840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-09-25T16:21:20.089844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-09-25T16:21:20.089847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-09-25T16:21:20.089850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-09-25T16:21:20.089853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-09-25T16:21:20.089856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-09-25T16:21:20.089869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2090: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2150: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2208: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2294: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2360: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2510: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2889: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.089971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2968: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3469: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3505: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3738: TTxInit for TxShards, 
read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3883: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3900: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3917: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4077: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4378: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4723: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4843: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4932: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4959: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4986: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.091179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.091924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.091936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.091946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.091953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.091957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.092166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:725:2711] sender: [1:787:2058] recipient: [1:15:2062] 2025-09-25T16:21:20.165041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.165092Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 65us result status StatusPathDoesNotExist 2025-09-25T16:21:20.165132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:20.040548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:20.040572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.040578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:20.040583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:20.040590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:20.040594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:20.040604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.040618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:20.040734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:20.040806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:20.055084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:20.055103Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:20.058475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:20.058506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:20.058551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:20.059675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:20.059731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:20.059839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:20.059937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:20.060770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.060816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.061092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.061103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.061127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.061137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.061144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.061165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.090939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.091369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
s/new_ds_pdisks' 2025-09-25T16:21:20.260912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 42 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.260926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 42 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.260931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720658 2025-09-25T16:21:20.260936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 42 2025-09-25T16:21:20.260941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 38 2025-09-25T16:21:20.261003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 37 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.261012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 37 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.261016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720658 2025-09-25T16:21:20.261021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 37], version: 18446744073709551615 2025-09-25T16:21:20.261025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 2 2025-09-25T16:21:20.261033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720658, subscribers: 1 2025-09-25T16:21:20.261038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:808:2783] 2025-09-25T16:21:20.261625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.261715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2025-09-25T16:21:20.261738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:808:2783] at schemeshard: 72057594046678944 
Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_sys_view' 2025-09-25T16:21:20.261744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:808:2783] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:750:2736] sender: [1:852:2058] recipient: [1:15:2062] 2025-09-25T16:21:20.333234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333337Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 127us result status StatusSuccess 2025-09-25T16:21:20.333433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333564Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333595Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 35us result status StatusPathDoesNotExist 2025-09-25T16:21:20.333628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: 
StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 37], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976720658, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_sys_view" PathId: 37 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333706Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333723Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_ds_pdisks" took 19us result status StatusPathDoesNotExist 2025-09-25T16:21:20.333739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_ds_pdisks\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000040, drop txId: 281474976720657, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_ds_pdisks" PathId: 38 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.333828Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_partition_stats" took 24us result status StatusSuccess 2025-09-25T16:21:20.333875Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_partition_stats" PathDescription { Self { Name: "new_partition_stats" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:21:20.637862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:20.637884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.637890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:20.637895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:20.637904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:20.637909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:20.637919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.637933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:20.638104Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:20.638246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:20.657866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:20.657886Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:20.664513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:20.664586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:20.664628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:20.668287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:20.668345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:20.668441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:20.668494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:20.668802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.668862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.669074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.669080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.669094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.669100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.669105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.669130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.704816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.705201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 38 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:21:20.841981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 38 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:21:20.841988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:21:20.841994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 38 2025-09-25T16:21:20.841999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-09-25T16:21:20.842097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:21:20.842106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-09-25T16:21:20.842111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-09-25T16:21:20.842115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 37], version: 2 2025-09-25T16:21:20.842119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 1 2025-09-25T16:21:20.842128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-09-25T16:21:20.842756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-09-25T16:21:20.842777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-09-25T16:21:20.842828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:21:20.842837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-09-25T16:21:20.842900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:21:20.842917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got 
EvNotifyTxCompletionResult 2025-09-25T16:21:20.842922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:691:2681] TestWaitNotification: OK eventTxId 101 2025-09-25T16:21:20.843006Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.843038Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 43us result status StatusSuccess 2025-09-25T16:21:20.843139Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000037 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 37 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-09-25T16:21:20.843970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:21:20.844006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0 2025-09-25T16:21:20.844013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0, sysViewDescription: Name: "new_sys_view" Type: ENodes 2025-09-25T16:21:20.844041Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 37], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149, at schemeshard: 72057594046678944 2025-09-25T16:21:20.844490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 37], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149" TxId: 102 SchemeshardId: 72057594046678944 PathId: 37 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-09-25T16:21:20.844546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 37], type: EPathTypeSysView, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp:149, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:21:20.844602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:21:20.844608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:21:20.844667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:21:20.844683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:21:20.844688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:699:2689] TestWaitNotification: OK eventTxId 102 2025-09-25T16:21:20.844752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.844778Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 31us result status StatusSuccess 2025-09-25T16:21:20.844863Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000037 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 37 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:21:20.462412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:20.462447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.462453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:20.462459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:20.462467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:20.462472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:20.462481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.462498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:20.462627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:20.462719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:20.487254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:20.487277Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:20.514410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:20.514562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:20.514611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:20.529742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:20.529825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:20.529958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:20.530035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:20.530512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.530567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.530864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.530877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.530898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.530906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.530913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.530953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.579632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.579998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.580011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.580024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.580037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.580049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
5-09-25T16:21:20.713815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-09-25T16:21:20.713819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-09-25T16:21:20.713823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-09-25T16:21:20.713828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-09-25T16:21:20.713832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-09-25T16:21:20.713837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-09-25T16:21:20.713842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-09-25T16:21:20.713847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-09-25T16:21:20.713851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-09-25T16:21:20.713855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-09-25T16:21:20.713859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-09-25T16:21:20.713864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-09-25T16:21:20.713868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-09-25T16:21:20.713873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-09-25T16:21:20.713877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-09-25T16:21:20.713881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-09-25T16:21:20.713885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-09-25T16:21:20.713902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-09-25T16:21:20.713908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-09-25T16:21:20.713912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-09-25T16:21:20.713917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-09-25T16:21:20.713921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-09-25T16:21:20.713925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-09-25T16:21:20.713930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-09-25T16:21:20.713951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2090: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.713973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2150: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.713983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2208: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.713997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2294: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2360: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2510: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2889: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2968: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3469: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3505: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3738: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3883: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3900: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3917: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4077: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2025-09-25T16:21:20.714374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4378: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4723: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4843: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4932: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4959: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.714467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4986: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.715896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.717101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.717120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.717180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.717191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.717198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.717215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:700:2687] sender: [1:760:2058] recipient: [1:15:2062] 2025-09-25T16:21:20.785156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:20.785210Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 69us result status StatusSuccess 2025-09-25T16:21:20.785300Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000037 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 37 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:21:20.372906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:20.372940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.372946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:20.372952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:20.372960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:20.372965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:20.372976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:20.372992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:20.373132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:20.373221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:20.412130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:20.412160Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:20.423449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:20.423585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:20.423639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:20.429406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:20.429508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:20.429649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:20.429745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:20.430314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.430377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:20.430697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:20.430712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:20.430739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:20.430749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:20.430756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:20.430797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:20.484788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:20.485239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
: 72057594046678944 2025-09-25T16:21:21.145818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 8589936746 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:21.145826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000038 2025-09-25T16:21:21.145856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-09-25T16:21:21.145883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 35 2025-09-25T16:21:21.145893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 1 2025-09-25T16:21:21.146296Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:21.146309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:21:21.146385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2025-09-25T16:21:21.146411Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:21.146417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2215], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:21:21.146423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2215], at schemeshard: 72057594046678944, txId: 102, path id: 37 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:21:21.146524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:21:21.146531Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:21:21.146544Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:21.146549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:21.146554Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:21.146558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:21.146563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:21:21.146571Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:21.146576Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:21:21.146581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:21:21.146593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 2 2025-09-25T16:21:21.146599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:21:21.146603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2025-09-25T16:21:21.146607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 37], 18446744073709551615 2025-09-25T16:21:21.146704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:21.146715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:21.146720Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:21.146725Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-09-25T16:21:21.146730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-09-25T16:21:21.146824Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 37 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:21.146833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 37 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:21.146837Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:21.146841Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 37], version: 18446744073709551615 2025-09-25T16:21:21.146845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 37] was 1 2025-09-25T16:21:21.146856Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:21:21.146877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:21:21.146885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 37], at schemeshard: 72057594046678944 2025-09-25T16:21:21.146894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 35 2025-09-25T16:21:21.147467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:21:21.147680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:21:21.147703Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2025-09-25T16:21:21.147763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:21:21.147772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:21:21.147787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:21:21.147791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:21:21.147856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:21:21.147887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:21:21.147892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:727:2717] 2025-09-25T16:21:21.147928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:21:21.147936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:21:21.147940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:727:2717] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-09-25T16:21:21.148004Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:21.148033Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 36us result status StatusPathDoesNotExist 2025-09-25T16:21:21.148078Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-09-25T16:20:44.172039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.176880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.176947Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.177876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.177931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.177965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.177980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.177993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.178009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.178023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.178036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.178050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.178063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.178077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:44.178090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.178122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.183574Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.183648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.183657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.183702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.183736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.183747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.183752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.183782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.183794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.183803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.183809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.183834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.183842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.183847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.183851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.183859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.183863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.183869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.183872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.183879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.183885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.183888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.183897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.183906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.183910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.183948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.183956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.183961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.183977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.183986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.183991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.184000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.184017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.184022Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.184028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.184034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.184038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.184048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:44.184053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
line=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=9e203388-9a2b11f0-90c4c071-151f0998; Cleanup old portions: 1 2 3 5 6 8 10 7 12 9 11 14 16 13 18 15 20 17 22 19 24 21 23 26 28 25 30 27 32 29 31 34 36 33 38 35 37 40 39 42 41 44 43 46 2025-09-25T16:20:57.567517Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-09-25T16:20:57.567527Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=9e203388-9a2b11f0-90c4c071-151f0998; 2025-09-25T16:20:57.567628Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[230] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-09-25T16:20:57.568086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9e203388-9a2b11f0-90c4c071-151f0998;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=9e203388-9a2b11f0-90c4c071-151f0998; 2025-09-25T16:20:57.568408Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=129593352;raw_bytes=131330514;count=21;records=1575000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=265471768;raw_bytes=291227308;count=44;records=3225002} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:20:57.579656Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=9e203388-9a2b11f0-90c4c071-151f0998; 2025-09-25T16:20:57.579682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-09-25T16:20:57.579839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::9e203388-9a2b11f0-90c4c071-151f0998; 2025-09-25T16:20:57.579861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:20:57.579878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-09-25T16:20:57.579895Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:20:57.579906Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=0; 2025-09-25T16:20:57.579927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:20:57.579940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:57.579948Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:20:57.579971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.556000s; 2025-09-25T16:20:57.579981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=9e203388-9a2b11f0-90c4c071-151f0998;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:20:57.580035Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:43:3:0:4848592:0] 2025-09-25T16:20:57.580046Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2025-09-25T16:20:57.580053Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:28:3:0:6171112:0] 2025-09-25T16:20:57.580059Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:46:3:0:4848592:0] 2025-09-25T16:20:57.580065Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:38:4:0:4848592:0] 2025-09-25T16:20:57.580072Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:35:4:0:4848592:0] 2025-09-25T16:20:57.580078Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:4848592:0] 2025-09-25T16:20:57.580084Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-09-25T16:20:57.580090Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6171112:0] 2025-09-25T16:20:57.580096Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:26:4:0:4848592:0] 2025-09-25T16:20:57.580102Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:40:3:0:6171112:0] 2025-09-25T16:20:57.580108Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:12:2:0:6171112:0] 2025-09-25T16:20:57.580114Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:4848592:0] 2025-09-25T16:20:57.580120Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:31:3:0:4848592:0] 2025-09-25T16:20:57.580126Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:34:3:0:4848592:0] 2025-09-25T16:20:57.580135Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6171112:0] 2025-09-25T16:20:57.580142Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-09-25T16:20:57.580149Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob 
DS:0:[9437184:2:41:4:0:6171112:0] 2025-09-25T16:20:57.580156Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:4848592:0] 2025-09-25T16:20:57.580163Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:39:2:0:4848592:0] 2025-09-25T16:20:57.580170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:4848592:0] 2025-09-25T16:20:57.580178Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6171112:0] 2025-09-25T16:20:57.580185Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:4848592:0] 2025-09-25T16:20:57.580191Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6171112:0] 2025-09-25T16:20:57.580197Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:16:3:0:6171112:0] 2025-09-25T16:20:57.580203Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:27:2:0:4848592:0] 2025-09-25T16:20:57.580209Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:24:2:0:6171112:0] 2025-09-25T16:20:57.580215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:44:4:0:6171112:0] 2025-09-25T16:20:57.580221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:33:2:0:6171112:0] 2025-09-25T16:20:57.580227Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:6171112:0] 2025-09-25T16:20:57.580234Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:4848592:0] 2025-09-25T16:20:57.580245Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:32:4:0:6171112:0] 2025-09-25T16:20:57.580251Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:4848592:0] 2025-09-25T16:20:57.580257Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:42:2:0:4848592:0] 2025-09-25T16:20:57.580263Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:25:3:0:1792:0] 2025-09-25T16:20:57.580269Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:29:4:0:6171112:0] 2025-09-25T16:20:57.580275Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:4848592:0] 2025-09-25T16:20:57.580281Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:30:2:0:4848592:0] 2025-09-25T16:20:57.580288Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:3:2:0:6171112:0] 2025-09-25T16:20:57.580295Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2025-09-25T16:20:57.580301Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:36:2:0:6171112:0] 2025-09-25T16:20:57.580308Z 
node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:23:4:0:4848592:0] 2025-09-25T16:20:57.580315Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2025-09-25T16:20:57.580321Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:37:3:0:6171112:0] GC for channel 3 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 22 Cleanups happened: 1 Old portions: 1 2 3 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 46 Cleaned up portions: 1 2 3 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 46 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-09-25T16:20:56.493953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:56.499614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:56.499669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:56.500600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:56.500658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:56.500699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:56.500720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:56.500738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:56.500759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:56.500777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:56.500795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:56.500814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:56.500849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.500869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:56.500887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:56.500928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:56.506651Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:56.506705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:56.506712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:56.506746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.506775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:56.506786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:56.506790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:56.506798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:56.506804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:56.506810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:56.506813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:56.506826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:56.506832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:56.506838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:56.506841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:56.506850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:56.506854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:56.506860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:56.506863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:56.506870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:56.506876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:56.506879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:56.506886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:56.506892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:56.506895Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:56.506912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:56.506918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:56.506921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:56.506931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:56.506937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.506940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:56.506946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:56.506951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:56.506954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:56.506960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:56.506965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:56.506969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:56.506979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:56.506985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
nule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1957904;count=7068;size_of_portion=184; 2025-09-25T16:21:20.360109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9358; 2025-09-25T16:21:20.360121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:20.360323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=193; 2025-09-25T16:21:20.360329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9604; 2025-09-25T16:21:20.360334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9618; 2025-09-25T16:21:20.360343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:20.360408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=59; 2025-09-25T16:21:20.360413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9767; 2025-09-25T16:21:20.360450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=30; 2025-09-25T16:21:20.360468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-09-25T16:21:20.360541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=65; 2025-09-25T16:21:20.360592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=39; 2025-09-25T16:21:20.364515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3902; 2025-09-25T16:21:20.369555Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5001; 2025-09-25T16:21:20.369592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:20.369601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:21:20.369610Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:20.369629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=13; 2025-09-25T16:21:20.369637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:20.369659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:20.369667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:20.369680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-09-25T16:21:20.369700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:21:20.369717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-09-25T16:21:20.369723Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20430; 2025-09-25T16:21:20.369769Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12006072;raw_bytes=14157900;count=2;records=150200} inactive {blob_bytes=90894184;raw_bytes=95257550;count=215;records=1125200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:20.369809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:20.369821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:20.369841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:20.369850Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:20.369911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:20.369936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:20.369945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:20.369962Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815461020;tx_id=18446744073709551615;;current_snapshot_ts=1758817258317; 2025-09-25T16:21:20.369972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:20.369984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:20.369990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:20.370017Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:20.371090Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:20.371199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:20.371206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:20.371210Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:20.371217Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:20.371233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:20.371240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:20.371250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815461020;tx_id=18446744073709551615;;current_snapshot_ts=1758817258317; 2025-09-25T16:21:20.371258Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:20.371266Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:20.371271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:20.371286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:20.371294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> IncrementalRestoreScan::Empty >> TopicTimestamp::TimestampRead_1MB_LegacyTopic_offset+middle >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView >> TSchemeShardSysViewTest::ReadOnlyMode >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] >> IncrementalRestoreScan::Empty [GOOD] >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for 
TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:21:23.050086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:21:23.050116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:23.050124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:21:23.050130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:21:23.050137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:21:23.050142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:23.050152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:23.050168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:23.050304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:23.050386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:23.088496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:23.088524Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:23.097657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:23.097708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:23.097760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:23.099525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:23.099608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:23.099744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:23.099861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:23.106101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-09-25T16:21:23.106192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:23.106592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:23.106609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:23.106641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:23.106652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:23.106659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:23.106691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:23.190449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190768Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.190890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ration.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:21:23.747650Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:21:23.747664Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:23.747669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:23.747675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:23.747679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:23.747684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:21:23.747691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:23.747697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:21:23.747702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:21:23.747713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:21:23.747721Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:21:23.747726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-09-25T16:21:23.747730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-09-25T16:21:23.747851Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:23.747863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:23.747867Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:23.747872Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-09-25T16:21:23.747877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-09-25T16:21:23.748043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:23.748055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:23.748060Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:23.748065Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-09-25T16:21:23.748070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-09-25T16:21:23.748079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:21:23.748646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:21:23.748666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-09-25T16:21:23.748727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-09-25T16:21:23.748736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-09-25T16:21:23.748753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:21:23.748757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:21:23.748860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-09-25T16:21:23.748887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-09-25T16:21:23.748893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:327:2317] 2025-09-25T16:21:23.748920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:21:23.748933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:21:23.748937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:327:2317] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-09-25T16:21:23.749019Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:23.749049Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 41us result status StatusSuccess 2025-09-25T16:21:23.749170Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } 
PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-09-25T16:21:23.749233Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-09-25T16:21:23.749256Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 25us result status StatusSuccess
2025-09-25T16:21:23.749317Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD]
Test command err:
2025-09-25T16:21:23.733354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown
2025-09-25T16:21:23.814560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-09-25T16:21:23.816721Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-09-25T16:21:23.816793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-09-25T16:21:23.816815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/003ade/r3tmp/tmpB72iHr/pdisk_1.dat
2025-09-25T16:21:23.913442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-09-25T16:21:23.913477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-09-25T16:21:23.924375Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded
2025-09-25T16:21:23.925223Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1758817283051980 != 1758817283051984
2025-09-25T16:21:23.956273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-09-25T16:21:24.006159Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:592:2519] Exhausted
2025-09-25T16:21:24.006189Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:592:2519] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished
2025-09-25T16:21:24.006196Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:592:2519] Finish Done
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144]
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144]
2025-09-25T16:21:23.833135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-09-25T16:21:23.833160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-09-25T16:21:23.833168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-09-25T16:21:23.833173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration
2025-09-25T16:21:23.833181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config:
type TxMergeTablePartition, limit 10000 2025-09-25T16:21:23.833187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:21:23.833197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:21:23.833211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:21:23.833335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:21:23.833409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:21:23.850406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:21:23.850434Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:23.854908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:21:23.854952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:21:23.854997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:21:23.856192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:21:23.856247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:21:23.856356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:23.856443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:21:23.857414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:23.857470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:21:23.857765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:23.857773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:23.857834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:21:23.857843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:21:23.857848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:21:23.857872Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:21:23.910675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.910929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.910949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.910964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.910987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.910999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-09-25T16:21:23.911132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:21:24.339558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-09-25T16:21:24.339598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000037 2025-09-25T16:21:24.339740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:21:24.339759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:24.339780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000037 2025-09-25T16:21:24.339808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-09-25T16:21:24.339840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 35 2025-09-25T16:21:24.339854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:21:24.340561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:21:24.340571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-09-25T16:21:24.340662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2025-09-25T16:21:24.340683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:21:24.340688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:806:2763], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-09-25T16:21:24.340695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:806:2763], at schemeshard: 72057594046678944, txId: 102, path id: 37 2025-09-25T16:21:24.340774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:21:24.340783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:21:24.340799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:24.340804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:24.340810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:21:24.340813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:24.340818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-09-25T16:21:24.340839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:21:24.340846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:21:24.340851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:21:24.340864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 2 2025-09-25T16:21:24.340871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-09-25T16:21:24.340875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 38 2025-09-25T16:21:24.340879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 37], 2 2025-09-25T16:21:24.341014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 38 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:24.341026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 38 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:24.341031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:24.341036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 38 2025-09-25T16:21:24.341041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-09-25T16:21:24.341154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6249: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:24.341165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 37 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:21:24.341169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:21:24.341175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 37], version: 2 2025-09-25T16:21:24.341179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 1 2025-09-25T16:21:24.341188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-09-25T16:21:24.341745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:21:24.341765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:21:24.341830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:21:24.341838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-09-25T16:21:24.341915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:21:24.341932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:21:24.341937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:833:2788] TestWaitNotification: OK eventTxId 102 2025-09-25T16:21:24.342017Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:21:24.342052Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 46us result status StatusSuccess 2025-09-25T16:21:24.342150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000037 ParentPathId: 2 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 37 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2025-09-25T16:18:07.721648Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:07.727478Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:07.727574Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:07.727838Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:07.727931Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:07.728152Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:07.728163Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:07.728370Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:50:2076] ControllerId# 72057594037932033 2025-09-25T16:18:07.728375Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:07.728402Z 
node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:07.728432Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:07.732435Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:07.732453Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:07.732800Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:58:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.732854Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:59:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.732886Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:60:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.732915Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:61:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.732945Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:62:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.732974Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:63:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.733002Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:49:2075] Create Queue# [1:64:2087] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.733008Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:07.733024Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:50:2076] 2025-09-25T16:18:07.733030Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:50:2076] 2025-09-25T16:18:07.733039Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:07.733050Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:07.733267Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:07.733287Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:07.733819Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:07.733863Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:07.733980Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:07.734031Z node 2 :BS_NODE 
DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:07.734199Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:75:2076] ControllerId# 72057594037932033 2025-09-25T16:18:07.734206Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:07.734221Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:07.734245Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:07.735744Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:07.737267Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:07.737283Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:07.737751Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:82:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737806Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:83:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737851Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:84:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737892Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:85:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737920Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:86:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737947Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:87:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737976Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:74:2075] Create Queue# [2:88:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:07.737982Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:07.737995Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:75:2076] 2025-09-25T16:18:07.738001Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:75:2076] 2025-09-25T16:18:07.738009Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:07.738019Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:07.738091Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:07.738196Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:50:2076] 2025-09-25T16:18:07.738208Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:07.738233Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:07.738259Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:07.738402Z node 2 :PIPE_CLIENT 
DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:75:2076] 2025-09-25T16:18:07.738410Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:07.738415Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:07.738434Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:07.747544Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:07.747568Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:07.748783Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:07.748871Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:07.749814Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:07.749844Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:07.749902Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:07.749912Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:07.749930Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:07.749961Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\206\nD\014\\\363\333K3\275\271\004\016{\341F\344\223\331\221" } 2025-09-25T16:18:07.750052Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:07.750062Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:07.750072Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:07.750077Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:07.750112Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:100:2090] 2025-09-25T16:18:07.750179Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup ... 
4 (1,3): 1 on 3 (1,2): 1 on 8 AddNode 5 (1,2): 1 on 7 (1,2): -1 on 8 RemoveNode 9 (1,2): -1 on 6 (1,2): 1 on 6 (1,3): 1 on 9 RemoveNode 6 (1,1): 1 on 2 (1,3): -1 on 2 (1,1): -1 on 4 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 5 AddNode 2 (1,1): 1 on 4 (1,3): 1 on 3 RemoveNode 2 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 2 RemoveNode 7 (1,1): 1 on 3 AddNode 2 (1,2): 1 on 6 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 4 (1,3): 1 on 6 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 5 (1,1): -1 on 0 RemoveNode 3 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 1 RemoveNode 4 (1,3): 1 on 1 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 0 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 1 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 4 (1,1): 1 on 2 (1,3): 1 on 0 AddNode 9 (1,3): -1 on 6 AddNode 7 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 6 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 9 (1,3): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 7 (1,3): -1 on 2 (1,2): -1 on 9 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 9 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 0 AddNode 7 (1,3): 1 on 8 (1,2): 1 on 6 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 0 (1,2): -1 on 6 (1,3): 1 on 5 AddNode 3 (1,1): -1 on 3 AddNode 4 (1,3): 1 on 1 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 9 (1,1): 1 on 4 (1,2): 1 on 6 RemoveNode 7 (1,3): -1 on 5 (1,1): 1 on 1 (1,3): 1 on 6 RemoveNode 9 (1,3): 1 on 9 RemoveNode 8 (1,1): 1 on 2 AddNode 6 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 7 AddNode 8 (1,2): 1 on 5 AddNode 5 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 5 (1,3): 1 on 5 (1,1): 1 on 4 (1,2): -1 on 5 RemoveNode 4 (1,2): 1 on 5 (1,3): 1 on 2 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): -1 on 9 (1,2): -1 on 6 AddNode 4 (1,3): 1 on 9 RemoveNode 4 (1,3): -1 on 1 RemoveNode 0 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 2 (1,3): 1 on 1 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 AddNode 1 (1,2): 1 on 9 (1,2): 1 on 8 (1,1): 1 on 0 (1,3): 1 on 9 RemoveNode 6 (1,2): 1 on 8 AddNode 6 (1,3): -1 on 7 (1,2): 1 on 8 (1,3): -1 on 5 (1,2): 1 on 8 AddNode 0 (1,1): 1 on 2 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 0 (1,3): -1 on 9 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 8 RemoveNode 7 (1,2): -1 on 5 (1,1): 1 on 1 (1,1): -1 on 3 RemoveNode 2 (1,1): 1 on 0 (1,2): -1 on 7 (1,3): 1 on 2 (1,1): 1 on 2 (1,3): 1 on 1 (1,1): -1 on 1 (1,2): 1 on 6 (1,3): 1 on 4 (1,2): 1 on 9 (1,3): -1 on 4 RemoveNode 3 (1,2): 1 on 6 (1,3): 1 on 4 RemoveNode 5 (1,1): 1 on 0 (1,3): 1 on 3 RemoveNode 1 (1,3): -1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 0 (1,2): -1 on 5 AddNode 3 (1,2): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 RemoveNode 0 (1,3): 1 on 8 RemoveNode 2 (1,3): 1 on 0 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 5 (1,1): 1 on 1 AddNode 7 (1,1): 1 on 0 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 8 AddNode 8 (1,2): 1 on 6 RemoveNode 8 (1,3): 1 on 9 AddNode 9 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 9 AddNode 8 (1,2): 1 on 8 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 3 RemoveNode 7 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 0 (1,2): -1 on 9 (1,3): 1 on 4 (1,2): 1 on 7 (1,3): -1 on 5 (1,1): -1 on 1 (1,1): 1 on 2 (1,3): 1 on 6 AddNode 5 (1,3): 1 on 7 RemoveNode 3 (1,2): 1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,1): 1 on 4 (1,3): 1 on 8 (1,3): 1 on 7 (1,2): -1 on 8 AddNode 3 (1,1): 1 on 0 RemoveNode 0 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): 1 on 8 RemoveNode 4 (1,3): 1 on 2 (1,2): -1 on 6 (1,3): 1 on 3 AddNode 2 (1,3): 1 on 
5 (1,1): 1 on 2 (1,3): 1 on 2 RemoveNode 3 (1,3): 1 on 3 (1,2): 1 on 6 RemoveNode 5 (1,2): 1 on 9 (1,3): -1 on 9 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 0 (1,2): 1 on 5 AddNode 3 (1,3): -1 on 4 (1,3): 1 on 7 RemoveNode 2 (1,1): 1 on 1 (1,3): 1 on 5 RemoveNode 8 (1,1): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 0 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 4 (1,1): 1 on 2 (1,1): 1 on 2 (1,2): 1 on 5 (1,1): 1 on 0 (1,2): 1 on 6 (1,3): -1 on 8 (1,3): 1 on 5 (1,3): 1 on 1 (1,1): 1 on 3 AddNode 4 (1,1): -1 on 3 (1,1): 1 on 2 (1,3): -1 on 5 RemoveNode 4 (1,3): 1 on 2 (1,1): 1 on 0 (1,3): -1 on 6 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 6 (1,2): 1 on 7 (1,3): -1 on 3 AddNode 7 (1,2): 1 on 7 RemoveNode 0 (1,3): 1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 RemoveNode 7 (1,2): -1 on 6 AddNode 7 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): -1 on 6 RemoveNode 3 (1,3): 1 on 9 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 7 (1,3): -1 on 7 AddNode 7 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): -1 on 6 AddNode 5 (1,1): 1 on 1 (1,3): 1 on 2 (1,3): 1 on 2 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 9 (1,3): -1 on 2 RemoveNode 4 (1,3): -1 on 4 RemoveNode 7 (1,1): -1 on 4 (1,3): 1 on 2 (1,3): -1 on 2 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): -1 on 7 AddNode 9 (1,1): 1 on 4 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 3 AddNode 2 (1,3): 1 on 2 RemoveNode 0 (1,2): -1 on 7 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): -1 on 6 (1,3): 1 on 0 AddNode 7 (1,1): 1 on 0 (1,3): -1 on 8 RemoveNode 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 2 (1,1): 1 on 0 (1,3): 1 on 4 (1,1): 1 on 0 AddNode 0 (1,1): 1 on 3 RemoveNode 7 (1,3): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 1 RemoveNode 0 (1,2): 1 on 9 (1,2): -1 on 5 AddNode 1 (1,2): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 3 (1,3): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 1 (1,1): 1 on 1 AddNode 9 (1,3): 1 on 2 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): -1 on 1 (1,1): -1 on 3 (1,3): 1 on 2 AddNode 0 (1,2): 1 on 7 (1,3): -1 on 0 (1,1): 1 on 3 AddNode 8 (1,2): 1 on 7 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 2 (1,3): 1 on 2 RemoveNode 0 (1,2): 1 on 5 AddNode 0 (1,1): -1 on 1 RemoveNode 2 (1,1): 1 on 2 (1,1): -1 on 2 (1,3): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 6 (1,3): -1 on 8 (1,1): 1 on 2 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 8 (1,3): 1 on 9 RemoveNode 5 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): -1 on 9 (1,3): 1 on 5 (1,2): 1 on 8 (1,3): 1 on 8 RemoveNode 6 (1,2): -1 on 6 (1,3): 1 on 6 (1,3): 1 on 3 (1,2): 1 on 8 (1,1): 1 on 1 (1,3): 1 on 1 (1,1): 1 on 1 AddNode 6 (1,1): 1 on 4 AddNode 3 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 9 (1,3): 1 on 1 AddNode 2 (1,1): 1 on 0 (1,3): 1 on 7 AddNode 9 (1,1): -1 on 2 AddNode 1 (1,1): -1 on 1 (1,2): 1 on 8 RemoveNode 2 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 4 AddNode 7 (1,1): 1 on 1 RemoveNode 9 (1,2): 1 on 9 (1,3): 1 on 7 AddNode 4 (1,2): 1 on 6 (1,3): -1 on 7 (1,2): -1 on 6 (1,3): 1 on 5 (1,2): -1 on 8 (1,1): 1 on 3 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 8 (1,3): 1 on 2 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 (1,1): 1 on 4 RemoveNode 8 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 2 (1,1): 
-1 on 0 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,3): -1 on 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 RemoveNode 1 (1,3): 1 on 3 (1,3): 1 on 0 (1,1): 1 on 3 RemoveNode 2 (1,3): 1 on 5 (1,1): -1 on 2 (1,2): 1 on 8 (1,1): 1 on 1 RemoveNode 7 (1,3): -1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 9 (1,3): -1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,1): -1 on 0 (1,1): 1 on 0 (1,3): -1 on 1 (1,2): 1 on 8 (1,2): -1 on 8 (1,2): 1 on 9 (1,1): -1 on 4 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 5 (1,2): 1 on 9 AddNode 2 (1,2): -1 on 8 RemoveNode 9 (1,3): -1 on 3 (1,3): -1 on 1 RemoveNode 3 (1,1): 1 on 0 AddNode 5 (1,3): 1 on 4 RemoveNode 6 (1,2): 1 on 7 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 1 AddNode 2 (1,3): -1 on 7 (1,2): -1 on 5 (1,1): -1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 AddNode 8 (1,1): 1 on 3 AddNode 3 (1,2): 1 on 9 (1,3): 1 on 5 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 7 AddNode 6 (1,3): 1 on 9 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 9 AddNode 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,3): 1 on 1 (1,2): -1 on 9 (1,1): -1 on 3 RemoveNode 7 (1,2): 1 on 8 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 1 (1,1): -1 on 3 RemoveNode 1 (1,1): 1 on 4 (1,3): 1 on 0 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 8 (1,1): 1 on 2 AddNode 9 (1,3): 1 on 6 (1,3): 1 on 6 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 7 (1,2): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 8 (1,2): -1 on 5 AddNode 4 (1,1): 1 on 0 (1,3): 1 on 5 (1,3): 1 on 2 RemoveNode 4 (1,3): -1 on 9 (1,1): 1 on 4 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 7 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 7 RemoveNode 6 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 5 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): -1 on 3 (1,1): 1 on 1 RemoveNode 5 (1,1): 1 on 4 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,2): 1 on 9 (1,1): 1 on 1 (1,3): 1 on 4 (1,3): 1 on 4 RemoveNode 2 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 4 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): -1 on 9 (1,2): -1 on 7 AddNode 5 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 7 (1,2): -1 on 5 (1,1): 1 on 2 (1,3): 1 on 8 (1,1): -1 on 2 RemoveNode 0 (1,2): 1 on 5 (1,2): -1 on 7 RemoveNode 5 (1,3): 1 on 5 AddNode 2 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 9 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 0 (1,1): 1 on 1 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 3 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 2 (1,2): -1 on 5 (1,2): 1 on 6 AddNode 1 (1,1): -1 on 0 RemoveNode 1 (1,2): -1 on 7 AddNode 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 RemoveNode 3 (1,3): -1 on 6 RemoveNode 8 (1,2): 1 on 8 (1,3): 1 on 7 (1,3): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,3): 1 on 3 AddNode 6 (1,1): 1 on 2 AddNode 5 (1,2): 1 on 6 AddNode 3 (1,3): 1 on 2 RemoveNode 4 (1,3): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 4 AddNode 0 (1,1): -1 on 4 RemoveNode 0 (1,2): 1 on 6 RemoveNode 5 (1,1): 1 on 0 (1,1): -1 on 4 (1,3): 1 on 1 (1,1): 1 on 0 AddNode 8 (1,1): -1 on 2 (1,3): -1 on 0 (1,3): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 7 AddNode 5 (1,2): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 RemoveNode 9 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): 1 on 6 (1,1): -1 on 0 (1,2): 1 on 9 (1,3): 1 on 1 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 RemoveNode 5 (1,2): -1 on 6 (1,3): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 3 AddNode 5 (1,1): 1 on 4 (1,3): 1 on 3 (1,1): -1 on 2 (1,3): -1 on 1 
(1,1): 1 on 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 RemoveNode 0 (1,2): 1 on 5 (1,3): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): -1 on 3 RemoveNode 6 (1,3): 1 on 9 AddNode 9 (1,1): 1 on 0 RemoveNode 5 (1,3): 1 on 0 RemoveNode 3 (1,3): -1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): -1 on 7 (1,3): -1 on 6 Final state: 403 387 397 417 400 0 0 0 0 0 0 0 0 0 0 359 427 442 433 410 192 199 174 233 198 205 200 154 185 175 - - + - - - - + + + Took 3.811852 seconds avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1520 ch.0 max = 1667 ch.0 std-dev = 29.52659818 ch.1 avg = 1600 ch.1 min = 1529 ch.1 max = 1691 ch.1 std-dev = 31.3926743 ch.2 avg = 1600 ch.2 min = 1531 ch.2 max = 1662 ch.2 std-dev = 27.66694779 avg = 1250 std-dev = 0 avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1600 ch.0 max = 1600 ch.0 std-dev = 0 ch.1 avg = 1600 ch.1 min = 1600 ch.1 max = 1600 ch.1 std-dev = 0 ch.2 avg = 1600 ch.2 min = 1600 ch.2 max = 1600 ch.2 std-dev = 0 avg = 1250 std-dev = 0 avg = 4800 min = 4799 max = 4801 std-dev = 0.2449489743 ch.0 avg = 1600 ch.0 min = 1519 ch.0 max = 1668 ch.0 std-dev = 29.39795911 ch.1 avg = 1600 ch.1 min = 1526 ch.1 max = 1666 ch.1 std-dev = 29.67052409 ch.2 avg = 1600 ch.2 min = 1522 ch.2 max = 1700 ch.2 std-dev = 33.00181813 avg = 1250 std-dev = 0 |82.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-09-25T16:20:59.948314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:59.954401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:59.954461Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:59.955367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:59.955427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:59.955470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:59.955496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:59.955518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:59.955545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:59.955567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:59.955590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:59.955613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:59.955635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.955659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:59.955681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:59.955749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:59.963170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:59.963246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:59.963258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:59.963306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.963352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:59.963367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:59.963375Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:59.963388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:59.963399Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:59.963408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:59.963414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:59.963439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.963450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:59.963459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:59.963465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:59.963478Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:59.963487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:59.963497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:59.963502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:59.963514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:59.963523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:59.963529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:59.963540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:59.963550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:59.963556Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:59.963587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:59.963596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:59.963602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:59.963621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:59.963631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.963637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.963646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:59.963655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:59.963661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:59.963670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:59.963680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:59.963686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:59.963704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:59.963713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1957904;count=7068;size_of_portion=184; 2025-09-25T16:21:26.650331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=10196; 2025-09-25T16:21:26.650346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:26.650551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=197; 2025-09-25T16:21:26.650560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10449; 2025-09-25T16:21:26.650567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10464; 2025-09-25T16:21:26.650577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:26.650638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=52; 2025-09-25T16:21:26.650647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10613; 2025-09-25T16:21:26.650691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=35; 2025-09-25T16:21:26.650713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:26.650781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=61; 2025-09-25T16:21:26.650827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=38; 2025-09-25T16:21:26.655089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4237; 2025-09-25T16:21:26.661014Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5879; 2025-09-25T16:21:26.661060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:26.661071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:26.661080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:26.661101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=15; 2025-09-25T16:21:26.661111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=2; 2025-09-25T16:21:26.661130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:26.661139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:26.661154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:26.661177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=16; 2025-09-25T16:21:26.661196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-09-25T16:21:26.661204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22670; 2025-09-25T16:21:26.661252Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12006072;raw_bytes=14157900;count=2;records=150200} inactive {blob_bytes=90894184;raw_bytes=95257550;count=215;records=1125200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:26.661336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:26.661351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:26.661372Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:26.661381Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:26.661425Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:26.661453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:26.661461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:26.661478Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815464473;tx_id=18446744073709551615;;current_snapshot_ts=1758817261769; 2025-09-25T16:21:26.661489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:26.661501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:26.661507Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:26.661535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:26.662944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:26.663058Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:26.663066Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:26.663071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:26.663078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:26.663100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:26.663107Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=5; 2025-09-25T16:21:26.663123Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815464473;tx_id=18446744073709551615;;current_snapshot_ts=1758817261769; 2025-09-25T16:21:26.663132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:26.663141Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:26.663147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:26.663166Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:21:26.663175Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7168:8797];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:41.793061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:41.793086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.793092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:41.793098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:41.793105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:41.793110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:41.793120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:41.793136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:41.793256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:41.793323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:41.818047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:41.818078Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:41.818190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.822904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:41.823005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:41.823047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:41.824580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:41.824660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:41.824765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.825000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:41.826136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.826190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:41.826459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:41.826472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:41.826493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:41.826502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:41.826510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:41.826552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:41.827919Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:41.852965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:41.853034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.853086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:41.853094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:41.853156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:41.853171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:41.853848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.853895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:41.853931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.853940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:41.853948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:41.853954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:41.854402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.854411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:41.854417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:41.854801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.854812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:41.854819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:41.854826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:41.855596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-09-25T16:19:41.856061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:41.856102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:41.856296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:41.856322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... icy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } 
TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:28.907592Z node 106 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][72075186233409549:2][72075186233409546][106:839:2681] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-09-25T16:21:28.907621Z node 106 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][106:791:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:21:28.907654Z node 106 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][72075186233409549:2][72075186233409546][106:839:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1758817288890613 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1758817288890613 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1758817288890613 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-09-25T16:21:28.908319Z node 106 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:200: [TableChangeSenderShard][72075186233409549:2][72075186233409546][106:839:2681] Handle NKikimrChangeExchange.TEvStatus 
Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-09-25T16:21:28.908340Z node 106 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:239: [AsyncIndexChangeSenderMain][72075186233409549:2][106:791:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-09-25T16:21:29.078625Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:21:29.078731Z node 106 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 126us result status StatusSuccess 2025-09-25T16:21:29.078975Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-09-25T16:18:08.507914Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:338} Bootstrap 2025-09-25T16:18:08.513816Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } 
} AvailabilityDomains: 0 } 2025-09-25T16:18:08.513948Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:233} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-09-25T16:18:08.514254Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:122} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-09-25T16:18:08.514367Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-09-25T16:18:08.514611Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:276} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-09-25T16:18:08.514625Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:24} StartLocalProxy GroupId# 0 2025-09-25T16:18:08.514866Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-09-25T16:18:08.514872Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-09-25T16:18:08.514904Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:313} StartInvalidGroupProxy GroupId# 4294967295 2025-09-25T16:18:08.514930Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:325} StartRequestReportingThrottler 2025-09-25T16:18:08.519442Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:159: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-09-25T16:18:08.519466Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:319: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-09-25T16:18:08.519907Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.519940Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.519973Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.520008Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.520039Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.520069Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.520094Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-09-25T16:18:08.520100Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-09-25T16:18:08.520114Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-09-25T16:18:08.520119Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-09-25T16:18:08.520128Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:259: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-09-25T16:18:08.520137Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-09-25T16:18:08.520297Z node 1 :BS_NODE DEBUG: 
{NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-09-25T16:18:08.523854Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:08.523886Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.524317Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-09-25T16:18:08.524382Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.524450Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-09-25T16:18:08.524498Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.524507Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-09-25T16:18:08.525720Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-09-25T16:18:08.525796Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:08.526530Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2025-09-25T16:18:08.526561Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-09-25T16:18:08.526576Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-09-25T16:18:08.526582Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-09-25T16:18:08.526623Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:53:2093] 2025-09-25T16:18:08.526647Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:18:08.526911Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-09-25T16:18:08.526925Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-09-25T16:18:08.526936Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-09-25T16:18:08.526993Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:53:2093] 2025-09-25T16:18:08.526999Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:53:2093] 2025-09-25T16:18:08.527011Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-09-25T16:18:08.527021Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-09-25T16:18:08.527027Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037932033 Cookie: 2} 2025-09-25T16:18:08.527037Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.527077Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-09-25T16:18:08.527082Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-09-25T16:18:08.527138Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.528547Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-09-25T16:18:08.528563Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-09-25T16:18:08.528597Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:462} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-09-25T16:18:08.528649Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:53:2093] 2025-09-25T16:18:08.528662Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.528675Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-09-25T16:18:08.528686Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:08.528961Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:18:08.529015Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:18:08.529179Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-09-25T16:18:08.529189Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 2146435075 Sender# [1:51:2092] SessionId# [0:0:0] Cookie# 0 2025-09-25T16:18:08.529199Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.004665s 2025-09-25T16:18:08.529300Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-09-25T16:18:08.529376Z node 1 :BS_NODE 
DEBUG: {NW110@node_warden_pdisk.cpp:538} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-09-25T16:18:08.529402Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-09-25T16:18:08.529444Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-09-25T16:18:08.529451Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2025-09-25T16:18:08.529460Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 26863924 ... node 160 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [160:311:2291] 2025-09-25T16:21:13.253544Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [160:311:2291] 2025-09-25T16:21:13.253549Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [160:311:2291] 2025-09-25T16:21:13.253553Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [160:311:2291] 2025-09-25T16:21:13.253560Z node 160 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [160:281:2270] EventType# 268637702 2025-09-25T16:21:13.253580Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-09-25T16:21:13.253587Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:21:13.253619Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-09-25T16:21:13.253626Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:21:13.253643Z node 160 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037927937 Connected to tablet 72057594037932033 from tablet 72057594037927937 2025-09-25T16:21:13.253681Z node 160 :HIVE DEBUG: hive_impl.cpp:458: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483648 StoragePoolName: "def1" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483649 StoragePoolName: "def2" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483650 StoragePoolName: "def3" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2025-09-25T16:21:13.253699Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-09-25T16:21:13.253704Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, 
NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-09-25T16:21:13.253712Z node 160 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{92150536577936}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-09-25T16:21:13.253725Z node 160 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{92150536577936}: tablet 72075186224037888 channel 0 assigned to group 2147483648 2025-09-25T16:21:13.253753Z node 160 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{92150536577936}: tablet 72075186224037888 channel 1 assigned to group 2147483649 2025-09-25T16:21:13.253764Z node 160 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{92150536577936}: tablet 72075186224037888 channel 2 assigned to group 2147483650 2025-09-25T16:21:13.253779Z node 160 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{92150536577936}(72075186224037888)::Execute - TryToBoot was not successfull 2025-09-25T16:21:13.253788Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-09-25T16:21:13.253794Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-09-25T16:21:13.268479Z node 160 :BS_PROXY_PUT INFO: dsproxy_put.cpp:650: [8729fbeaec2f6015] bootstrap ActorId# [160:314:2294] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-09-25T16:21:13.268856Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8729fbeaec2f6015] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-09-25T16:21:13.268951Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8729fbeaec2f6015] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-09-25T16:21:13.268966Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [8729fbeaec2f6015] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-09-25T16:21:13.268974Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [8729fbeaec2f6015] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-09-25T16:21:13.269789Z node 160 :BS_PROXY DEBUG: group_sessions.h:181: Send to queueActorId# [160:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-09-25T16:21:13.272428Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [8729fbeaec2f6015] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-09-25T16:21:13.272622Z node 160 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [8729fbeaec2f6015] Result# TEvPutResult {Id# 
[72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-09-25T16:21:13.272638Z node 160 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [8729fbeaec2f6015] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-09-25T16:21:13.272886Z node 160 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.515 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 160 } TEvVPutResult{ TimestampMs# 4.31 VDiskId# [0:1:0:0:0] NodeId# 160 Status# OK } ] } 2025-09-25T16:21:13.273153Z node 160 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-09-25T16:21:13.273377Z node 160 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-09-25T16:21:13.273400Z node 160 :HIVE DEBUG: tx__create_tablet.cpp:509: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040201 [160:271:2264] {EvCreateTabletReply Status: OK Owner: 72057594037927937 OwnerIdx: 0 TabletID: 72075186224037888 Origin: 72057594037927937}} 2025-09-25T16:21:13.273669Z node 160 :HIVE DEBUG: tx__update_tablet_groups.cpp:332: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{92150536577936}(72075186224037888)::Complete SideEffects: {Notifications: 0x10040207 [160:271:2264] {EvTabletCreationResult Status: OK TabletID: 72075186224037888} Callbacks: 1 Actions: NKikimr::TTabletKillRequest} 2025-09-25T16:21:13.273872Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:21:13.274087Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-09-25T16:21:13.274114Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-09-25T16:21:13.274120Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-09-25T16:21:13.274129Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274142Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274150Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274212Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [160:318:2297] 2025-09-25T16:21:13.274219Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [160:318:2297] 
2025-09-25T16:21:13.274247Z node 160 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-09-25T16:21:13.274263Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-09-25T16:21:13.274276Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-09-25T16:21:13.274282Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-09-25T16:21:13.274287Z node 160 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-09-25T16:21:13.274293Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274302Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274308Z node 160 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-09-25T16:21:13.274319Z node 160 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2025-09-25T16:21:13.274328Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [160:318:2297] 2025-09-25T16:21:13.274333Z node 160 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [160:318:2297] >> TYdbControlPlaneStorageDescribeQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate >> TYdbControlPlaneStorageModifyBinding::ShouldValidate >> TYdbControlPlaneStorageListBindings::ShouldSuccess |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> InMemoryControlPlaneStorage::ExecuteSimpleStreamQuery [GOOD] |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |82.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldSuccess >> TYdbControlPlaneStorageDescribeQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PQv1 |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess >> TYdbControlPlaneStorageModifyBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByName |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-09-25T16:20:43.778328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.783749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.783814Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.784680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.784748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.784792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.784818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.784856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.784877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.784899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.784920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.784940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.784960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.784983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.785002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.785053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.792014Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.792091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.792103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.792149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.792185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.792199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.792206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.792217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.792227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.792236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.792241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.792262Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.792272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.792281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.792286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.792298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.792306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.792316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.792321Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.792330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.792340Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.792345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.792354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.792364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.792369Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.792398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.792408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.792413Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.792429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.792438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.792443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.792452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.792460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.792465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.792473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.792482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.792488Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.792503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.792513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... UTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=254;sum=45988;count=168;size_of_portion=184; 2025-09-25T16:21:35.154008Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=11230; 2025-09-25T16:21:35.154022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:35.154136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=103; 2025-09-25T16:21:35.154143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=11390; 2025-09-25T16:21:35.154150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=11405; 2025-09-25T16:21:35.154158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:21:35.154183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18; 2025-09-25T16:21:35.154190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11522; 2025-09-25T16:21:35.154237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=38; 2025-09-25T16:21:35.154259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:21:35.154291Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=25; 2025-09-25T16:21:35.154312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=14; 2025-09-25T16:21:35.156074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1741; 2025-09-25T16:21:35.162383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6259; 2025-09-25T16:21:35.162432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:21:35.162442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:35.162451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:35.162471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=13; 2025-09-25T16:21:35.162480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:35.162503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:35.162512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:35.162526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-09-25T16:21:35.162550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=16; 2025-09-25T16:21:35.162570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=13; 2025-09-25T16:21:35.162577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21482; 2025-09-25T16:21:35.162635Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4824360;raw_bytes=6980626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100568064;raw_bytes=169935560;count=5;records=1780000} inactive {blob_bytes=426090992;raw_bytes=690829332;count=36;records=7540000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 
2025-09-25T16:21:35.162687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:35.162702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:35.162723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:35.162731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:35.162773Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:35.162800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:35.162810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:35.162828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815449247;tx_id=18446744073709551615;;current_snapshot_ts=1758817244895; 2025-09-25T16:21:35.162840Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:35.162854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:35.162861Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:35.162891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:35.170084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:35.170207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:35.170215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:35.170220Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:35.170229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:35.170268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:35.170277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:35.170294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815449247;tx_id=18446744073709551615;;current_snapshot_ts=1758817244895; 2025-09-25T16:21:35.170306Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:35.170318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:35.170326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:35.170351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:21:35.170362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TYdbControlPlaneStorageGetResult::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetResult::ShouldEmpty >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed >> TYdbControlPlaneStorageCreateConnection::ShouldDisableCurrentIam [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldValidate >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> InMemoryControlPlaneStorage::ExecuteSimpleAnalyticsQuery [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-09-25T16:20:42.410280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:42.416117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:42.416194Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:42.417201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:42.417274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:42.417328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:42.417354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:42.417375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:42.417398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:42.417421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:42.417443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:42.417465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:42.417486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.417509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:42.417531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:42.417585Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:42.426950Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:42.427032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:42.427043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:42.427087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:42.427125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:42.427140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:42.427147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:42.427158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:42.427168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:42.427177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:42.427183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:42.427206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:42.427217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:42.427226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:42.427231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:42.427243Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:42.427251Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:42.427259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:42.427265Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:42.427275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:42.427286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:42.427291Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:42.427302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:42.427310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:42.427315Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:42.427348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:42.427358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:42.427363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:42.427380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:42.427388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.427393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:42.427403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:42.427411Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:42.427416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:42.427425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:42.427434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:42.427440Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:42.427457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:42.427467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=254;sum=45988;count=168;size_of_portion=184; 2025-09-25T16:21:36.157599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2925; 2025-09-25T16:21:36.157608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-09-25T16:21:36.157692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=77; 2025-09-25T16:21:36.157698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3045; 2025-09-25T16:21:36.157705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3061; 2025-09-25T16:21:36.157713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:21:36.157733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=14; 2025-09-25T16:21:36.157738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3161; 2025-09-25T16:21:36.157767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=22; 2025-09-25T16:21:36.157784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-09-25T16:21:36.157809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=20; 2025-09-25T16:21:36.157827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=12; 2025-09-25T16:21:36.159260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1419; 2025-09-25T16:21:36.161601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2315; 2025-09-25T16:21:36.161626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-09-25T16:21:36.161635Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:21:36.161655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:36.161672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-09-25T16:21:36.161680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:36.161696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=10; 2025-09-25T16:21:36.161703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:36.161714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-09-25T16:21:36.161734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=10; 2025-09-25T16:21:36.161750Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=10; 2025-09-25T16:21:36.161756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=8611; 2025-09-25T16:21:36.161796Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4824360;raw_bytes=6980626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100568064;raw_bytes=169935560;count=5;records=1780000} inactive {blob_bytes=426091016;raw_bytes=690829332;count=36;records=7540000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:36.161832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:36.161845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:36.161864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:36.161872Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:36.161898Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:36.161923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:36.161932Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:36.161947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815447877;tx_id=18446744073709551615;;current_snapshot_ts=1758817243526; 2025-09-25T16:21:36.161972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:36.161983Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.162002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.162028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:36.165493Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:36.165587Z 
node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:36.165595Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:21:36.165599Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:36.165607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:36.165633Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:36.165640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:36.165654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815447877;tx_id=18446744073709551615;;current_snapshot_ts=1758817243526; 2025-09-25T16:21:36.165663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:36.165673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.165679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.165698Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:21:36.165708Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5688:7650];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |82.9%| [LD] {RESULT} 
$(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TYdbControlPlaneStorageCreateConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName >> TYdbControlPlaneStorageModifyBinding::ShouleCheckObjectStorageProjectionByColumns [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-09-25T16:20:43.175175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:43.181123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:43.181190Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:43.182093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:43.182157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:43.182196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:43.182220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:43.182240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:43.182263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:43.182284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:43.182305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:43.182326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:43.182345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.182366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:43.182386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:43.182432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:130:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:43.190689Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:43.190751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:43.190765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:43.190818Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.190861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:43.190877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:43.190884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:43.190896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:43.190907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:43.190916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:43.190921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:43.190944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:43.190954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:43.190962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:43.190968Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:43.190980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:43.190989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:43.190998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:43.191002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:43.191013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:43.191022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:43.191027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:43.191046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:43.191052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:43.191055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:43.191074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:43.191087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:43.191091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:43.191101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:43.191107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.191110Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:43.191115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:43.191121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:43.191125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:43.191133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:43.191143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:43.191149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:43.191164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:43.191173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=46656;count=168;size_of_portion=184; 2025-09-25T16:21:36.603226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2890; 2025-09-25T16:21:36.603237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-09-25T16:21:36.603327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=82; 2025-09-25T16:21:36.603334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3016; 2025-09-25T16:21:36.603342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3032; 2025-09-25T16:21:36.603351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:21:36.603374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=17; 2025-09-25T16:21:36.603380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3128; 2025-09-25T16:21:36.603412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=24; 2025-09-25T16:21:36.603433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2025-09-25T16:21:36.603463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=23; 2025-09-25T16:21:36.603483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-09-25T16:21:36.605270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1774; 2025-09-25T16:21:36.607251Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1951; 2025-09-25T16:21:36.607280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-09-25T16:21:36.607289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:21:36.607298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:21:36.607316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=12; 2025-09-25T16:21:36.607324Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:21:36.607343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-09-25T16:21:36.607350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:21:36.607363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-09-25T16:21:36.607385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=11; 2025-09-25T16:21:36.607403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-09-25T16:21:36.607410Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=8393; 2025-09-25T16:21:36.607456Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4825976;raw_bytes=7660626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100602600;raw_bytes=184175560;count=5;records=1780000} inactive {blob_bytes=426237224;raw_bytes=751149332;count=36;records=7540000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:21:36.607601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:21:36.607618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:21:36.607637Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:21:36.607647Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:21:36.607676Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:36.607702Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:36.607711Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:36.607728Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815448646;tx_id=18446744073709551615;;current_snapshot_ts=1758817244284; 2025-09-25T16:21:36.607739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:36.607750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.607756Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.607781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:21:36.610592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:21:36.610977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:21:36.610992Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
2025-09-25T16:21:36.611001Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:21:36.611008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:21:36.611040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:21:36.611048Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:21:36.611063Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815448646;tx_id=18446744073709551615;;current_snapshot_ts=1758817244284; 2025-09-25T16:21:36.611074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:21:36.611086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.611092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:21:36.611114Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:21:36.611125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TYdbControlPlaneStoragePipeline::ShouldCheckSimplePipeline [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist >> ShouldNotShowPassword::ShouldNotShowPasswordClickHouse [GOOD] >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL >> TYdbControlPlaneStorageCreateConnection::ShouldCheckUniqueName [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess >> TYdbControlPlaneStorageGetResult::ShouldEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty >> 
TYdbControlPlaneStorageListBindings::ShouldFilterByName [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStoragePipeline::ShouldIncrementGeneration [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun >> TYdbControlPlaneStorageDeleteConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess >> TYdbControlPlaneStorageCreateBinding::ShouldSucceed [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess >> ShouldNotShowPassword::ShouldNotShowPasswordPostgreSQL [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldSucccess >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst >> TYdbControlPlaneStorageListConnections::ShouldSuccess >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] >> TYdbControlPlaneStorageCreateConnectionPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageDescribeConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist >> TYdbControlPlaneStorageCreateQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldValidate >> TYdbControlPlaneStorageDeleteQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate >> TYdbControlPlaneStorageCreateQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TYdbControlPlaneStorageListBindings::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldPageToken >> TYdbControlPlaneStoragePipeline::ShouldCheckStopModifyRun [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxLengthName [GOOD] >> 
TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageCreateQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> TYdbControlPlaneStorageDescribeConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword >> TYdbControlPlaneStorageDeleteQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldApplyPermissionViewAst [GOOD] >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText >> TYdbControlPlaneStorageControlQuery::ShouldSucccess [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldValidate >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCreateJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStorageListConnections::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldPageToken >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 10 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 16 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 22 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 28 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 34 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 40 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 46 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 52 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 58 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 64 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 70 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 76 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 82 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 88 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 94 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 100 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 106 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 112 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 118 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 124 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 130 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 136 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 142 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 148 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 154 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 160 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 166 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 172 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 178 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 184 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 190 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 196 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 202 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 208 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 214 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 220 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 226 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 232 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 238 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 244 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 250 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 256 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 262 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 268 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 274 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 280 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 286 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 292 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 298 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 304 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 310 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 316 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 322 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 328 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 334 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 340 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 346 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 352 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 358 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 364 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 370 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 376 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 382 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 388 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 394 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 400 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 406 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 412 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 418 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 424 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 430 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 436 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 442 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 448 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 454 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 460 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 466 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 472 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 478 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 484 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess >> TYdbControlPlaneStorageDescribeConnection::ShouldNotShowClickHousePassword [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> 
TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TYdbControlPlaneStorageControlQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser >> TYdbControlPlaneStorageCreateQuery::ShouldCheckListJobs [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable >> TYdbControlPlaneStorageListQueries::ShouldSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-09-25T16:21:11.552865Z :ReadSession INFO: Random seed for debugging is 1758817271552855 2025-09-25T16:21:12.015043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062665300132754:2159];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:21:12.021972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:21:12.022616Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:21:12.022959Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7554062664167506856:2084];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:21:12.022984Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002919/r3tmp/tmpKlD1yr/pdisk_1.dat 2025-09-25T16:21:12.029391Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:21:12.061037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-09-25T16:21:12.072750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:21:12.086635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:21:12.086666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:21:12.090991Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:12.097355Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:21:12.097673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:21:12.105194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:21:12.105216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:21:12.113313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20597, node 1 2025-09-25T16:21:12.156910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/endf/002919/r3tmp/yandexizoC8l.tmp 2025-09-25T16:21:12.156920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/endf/002919/r3tmp/yandexizoC8l.tmp 2025-09-25T16:21:12.173041Z INFO: TTestServer started on Port 29348 GrpcPort 20597 2025-09-25T16:21:12.196634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/endf/002919/r3tmp/yandexizoC8l.tmp 2025-09-25T16:21:12.196706Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29348 PQClient connected to localhost:20597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-09-25T16:21:12.229012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-09-25T16:21:12.234043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:21:12.237475Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... waiting... waiting... waiting... 2025-09-25T16:21:12.476869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 2025-09-25T16:21:12.723160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062664167507154:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.723178Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062664167507142:2294], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.723193Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.721442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062665300133579:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.721470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.721621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062665300133589:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.721632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.721708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7554062665300133593:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.722625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:21:12.729770Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062664167507159:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.729801Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:12.741768Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7554062665300133595:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-09-25T16:21:12.743156Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554062664167507160:2128] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:21:12.821579Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7554062665300133686:2668] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:21:12.829426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:21:12.828428Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [2:7554062664167507190:2302], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:21:12.828909Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?no ... c286-68ddf15d] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-09-25T16:21:46.991000Z WriteTime: 2025-09-25T16:21:46.992000Z Ip: "ipv6:[::1]:48316" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:48316" } } } } 2025-09-25T16:21:48.124579Z :INFO: [/Root] [/Root] [82986e91-37a77be9-4191c286-68ddf15d] Closing read session. Close timeout: 3.000000s 2025-09-25T16:21:48.124586Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-09-25T16:21:48.124592Z :INFO: [/Root] [/Root] [82986e91-37a77be9-4191c286-68ddf15d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1306 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:21:48.124713Z :INFO: [/Root] [/Root] [82986e91-37a77be9-4191c286-68ddf15d] Closing read session. Close timeout: 0.000000s 2025-09-25T16:21:48.124718Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-09-25T16:21:48.124721Z :INFO: [/Root] [/Root] [82986e91-37a77be9-4191c286-68ddf15d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1307 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:21:48.124736Z :NOTICE: [/Root] [/Root] [82986e91-37a77be9-4191c286-68ddf15d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-09-25T16:21:48.125508Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_7_1_1565500501041017239_v1 grpc read done: success# 1, data# { read { } } 2025-09-25T16:21:48.125566Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer shared/user session shared/user_7_1_1565500501041017239_v1 got read request: guid# 5b46c8c2-9d4d619f-149a7aa4-1a179137 2025-09-25T16:21:48.129020Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_7_1_1565500501041017239_v1 grpc closed 2025-09-25T16:21:48.129042Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_7_1_1565500501041017239_v1 is DEAD 2025-09-25T16:21:48.129584Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2254: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_1565500501041017239_v1 2025-09-25T16:21:48.129594Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037892] server disconnected, pipe [7:7554062811587410794:2519] destroyed 2025-09-25T16:21:48.129612Z node 7 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_7_1_1565500501041017239_v1 2025-09-25T16:21:48.131679Z node 8 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1665: [72075186224037893][rt3.dc1--test-topic] pipe [7:7554062811587410792:2516] disconnected; active server actors: 1 2025-09-25T16:21:48.131692Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1674: [72075186224037893][rt3.dc1--test-topic] pipe [7:7554062811587410792:2516] client user disconnected session shared/user_7_1_1565500501041017239_v1 2025-09-25T16:21:48.184117Z node 7 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:21:48.184135Z node 7 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:21:48.184138Z node 7 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:21:48.288959Z node 7 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:21:48.288984Z node 7 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:21:48.288987Z node 7 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:21:48.389378Z node 7 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:21:48.389404Z node 7 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:21:48.389407Z node 7 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:21:48.489659Z node 7 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:21:48.489671Z node 7 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 
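Editor's note: the repeated [StateIdle] "Batching state ... / Try persist" records in this stretch of the log fire roughly every 100 ms (16:21:48.184 → .288 → .389 → .489), i.e. the partition's idle wake-up loop while the read session is torn down. The snippet below is a minimal sketch, not part of the test output, for measuring that cadence from a saved log; it assumes only the ISO timestamp prefix and the literal "Try persist" marker visible in these records.

```python
import re
from datetime import datetime

# Matches the timestamp prefix of a '[StateIdle] Try persist' record.
TRY_PERSIST = re.compile(r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z .*Try persist")

def persist_intervals_ms(lines):
    """Yield the gap in milliseconds between consecutive 'Try persist' records."""
    prev = None
    for line in lines:
        m = TRY_PERSIST.match(line)
        if not m:
            continue
        ts = datetime.fromisoformat(m.group(1))
        if prev is not None:
            yield (ts - prev).total_seconds() * 1000.0
        prev = ts

# Timestamps copied from the records above.
sample = [
    "2025-09-25T16:21:48.184138Z node 7 :PERSQUEUE DEBUG: [StateIdle] Try persist",
    "2025-09-25T16:21:48.288987Z node 7 :PERSQUEUE DEBUG: [StateIdle] Try persist",
    "2025-09-25T16:21:48.389407Z node 7 :PERSQUEUE DEBUG: [StateIdle] Try persist",
]
print([round(d) for d in persist_intervals_ms(sample)])  # [105, 100]
```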
2025-09-25T16:21:48.489673Z node 7 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:21:48.867552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.867559Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.867564Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:48.868479Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:48.868586Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:48.868661Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.868798Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-09-25T16:21:48.869241Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.869247Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.869251Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:48.869318Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:48.869438Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:48.869484Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.869547Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-09-25T16:21:48.869757Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-09-25T16:21:48.869934Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-09-25T16:21:48.869948Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-09-25T16:21:48.869977Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:48.869983Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-09-25T16:21:48.869988Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-09-25T16:21:48.869996Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-09-25T16:21:48.870465Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.870470Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.870474Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:48.870519Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:48.870612Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:48.870657Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.870703Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-09-25T16:21:48.870818Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.870851Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-09-25T16:21:48.870873Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:48.870879Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-09-25T16:21:48.870889Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-09-25T16:21:48.881233Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.881241Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.881245Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-09-25T16:21:48.881351Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-09-25T16:21:48.881489Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-09-25T16:21:48.881558Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.885061Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-09-25T16:21:48.885112Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-09-25T16:21:48.885126Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-09-25T16:21:48.885142Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-09-25T16:20:37.252020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:37.257680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:37.257751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:37.258640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:37.258696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:37.258740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:37.258769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:37.258788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:37.258810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:37.258831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:37.258854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:37.258877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:37.258899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.258922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:37.258944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:37.259002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:37.266481Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:37.266557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:37.266568Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:37.266637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:37.266679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:37.266693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:37.266699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:37.266711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:37.266722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:37.266731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:37.266736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:37.266757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:37.266767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:37.266776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
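Editor's note: the TTxInitSchema/TTxUpdateSchema records above register a fixed chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...), and the records that continue below report normalizer_finished for each of them in turn. A quick way to audit a full tablet log for a normalizer that never completed is to diff the two sets; the sketch below is a hypothetical helper that assumes only the event=...;description=CLASS_NAME=... fields visible in these lines.

```python
import re

REGISTERED = re.compile(r"event=normalizer_register;description=CLASS_NAME=(\w+)")
FINISHED = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+)")

def unfinished_normalizers(log_text: str) -> list[str]:
    """Normalizers that were registered but never reported normalizer_finished."""
    finished = set(FINISHED.findall(log_text))
    return [name for name in REGISTERED.findall(log_text) if name not in finished]
```

On a log where the tablet initialized cleanly the list is expected to come out empty; in the truncated excerpt here, the tail of the chain (CopyBlobIdsToV2, RestoreAppearanceSnapshot) falls inside the elided "..." span, so only the visible registrations can be checked.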
2025-09-25T16:20:37.266781Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:37.266793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:37.266801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:37.266810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:37.266816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:37.266826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:37.266834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:37.266839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:37.266850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:37.266859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:37.266865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:37.266896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:37.266906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:37.266911Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:37.266928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:37.266937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.266942Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:37.266951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:37.266959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:37.266964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:37.266972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:37.266981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:37.266986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:37.267001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_ ... [54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-09-25T16:21:43.770426Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:43.770435Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:43.770569Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:43.770604Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770611Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:43.770630Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-09-25T16:21:43.770649Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-09-25T16:21:43.770714Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-09-25T16:21:43.770736Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770759Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770774Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770819Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:43.770836Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770853Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770861Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [54:459:2471] finished for tablet 9437184 2025-09-25T16:21:43.770948Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.01},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":66621953,"name":"_full_task","f":66621953,"d_finished":0,"c":0,"l":66633274,"d":11321},"events":[{"name":"bootstrap","f":66622012,"d_finished":217,"c":1,"l":66622229,"d":217},{"a":66633215,"name":"ack","f":66632963,"d_finished":215,"c":1,"l":66633178,"d":274},{"a":66633213,"name":"processing","f":66622271,"d_finished":540,"c":3,"l":66633178,"d":601},{"name":"ProduceResults","f":66622153,"d_finished":362,"c":6,"l":66633257,"d":362},{"a":66633257,"name":"Finish","f":66633257,"d_finished":0,"c":0,"l":66633274,"d":17},{"name":"task_result","f":66622274,"d_finished":312,"c":2,"l":66632840,"d":312}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:43.770964Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:43.771020Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.01},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":66621953,"name":"_full_task","f":66621953,"d_finished":0,"c":0,"l":66633370,"d":11417},"events":[{"name":"bootstrap","f":66622012,"d_finished":217,"c":1,"l":66622229,"d":217},{"a":66633215,"name":"ack","f":66632963,"d_finished":215,"c":1,"l":66633178,"d":370},{"a":66633213,"name":"processing","f":66622271,"d_finished":540,"c":3,"l":66633178,"d":697},{"name":"ProduceResults","f":66622153,"d_finished":362,"c":6,"l":66633257,"d":362},{"a":66633257,"name":"Finish","f":66633257,"d_finished":0,"c":0,"l":66633370,"d":113},{"name":"task_result","f":66622274,"d_finished":312,"c":2,"l":66632840,"d":312}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
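Editor's note: the scan_finish / scan_finished records above carry their timing breakdown as a JSON object in the stats= field; judging by the "t" values in seconds (0.01, 0.011) alongside a full-task "d" of 11417 over the same interval, the duration fields appear to be microseconds. Below is a minimal sketch (a hypothetical helper, assuming only the stats={...};iterator= layout shown in these records) for pulling the per-stage totals out of such a record.

```python
import json
import re

STATS = re.compile(r"stats=(\{.*?\});iterator=", re.DOTALL)

def scan_event_totals(record: str) -> dict[str, tuple[int, int]]:
    """Map event name -> (total d_finished, call count) for one scan stats record."""
    m = STATS.search(record)
    if not m:
        return {}
    stats = json.loads(m.group(1))
    return {e["name"]: (e.get("d_finished", 0), e.get("c", 0)) for e in stats["events"]}
```

Applied to either record above this yields bootstrap=217, ack=215, processing=540, ProduceResults=362, task_result=312 (microseconds, per the reading above) against a full-task span of roughly 11.3–11.4 ms.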
2025-09-25T16:21:43.771038Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:43.759399Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-09-25T16:21:43.771044Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:43.771071Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> TYdbControlPlaneStorageDescribeQueryPermissions::ShouldNotApplyPermissionViewAstAndViewQueryText [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess >> TYdbControlPlaneStorageModifyQuery::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |82.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> TYdbControlPlaneStoragePipeline::ShouldCheckJobMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields >> TYdbControlPlaneStorageCreateBinding::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidate >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageCreateQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx1 [GOOD] Test command err: iteration# 1 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 7 
BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 13 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 19 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 25 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 31 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 37 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 43 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 49 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 55 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 61 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 67 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 73 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 79 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 85 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 91 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 97 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 103 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 109 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 115 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 121 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 127 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 133 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 139 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 145 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 151 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 157 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 163 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 169 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 175 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 181 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 187 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 193 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 199 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 205 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 211 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 217 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 223 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 229 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 235 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 241 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 247 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 253 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 259 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 265 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 271 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 277 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 283 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 289 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 295 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 301 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 307 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 313 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 319 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 325 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 331 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 337 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 343 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 349 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 355 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 361 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 367 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 373 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 379 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 385 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 391 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 397 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 403 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 409 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 415 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 421 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 427 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 433 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 439 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 445 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 451 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 457 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 463 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 469 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 475 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 481 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 487 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsByQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe >> Balancing::Balancing_OneTopic_TopicApi >> TYdbControlPlaneStorageGetQueryStatus::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission >> TopicTimestamp::TimestampRead_1MB_LegacyTopic_offset+middle [GOOD] >> TopicTimestamp::TimestampRead_1MB_Topic_exact >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic |82.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: 
[1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:40.224951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:40.224969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.224972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:40.224976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:40.224980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:40.224983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:40.224989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:40.225008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:40.225084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:40.225129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:40.241033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:40.241062Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:40.241130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.244762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:40.244886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:40.244922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:40.246339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:40.246412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:40.246506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.246730Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:40.247780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.247832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:40.248108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:40.248119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:40.248139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:40.248147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:40.248153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:40.248204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:40.249666Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:40.274631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:40.274715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.274775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:40.274784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:40.274825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:40.274840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB 
called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:40.275480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.275540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:40.275581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.275597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:40.275603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:40.275608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:40.276153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:40.276652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:40.276682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:40.276689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:40.277452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:40.277956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:40.277995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send 
Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:40.278230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:40.278261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, ... SizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:21:50.821129Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" 
Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-09-25T16:21:50.821249Z node 103 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 144us result status StatusSuccess 2025-09-25T16:21:50.821482Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TYdbControlPlaneStorageCreateQuery::ShouldListJobsCreatedByMe [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TYdbControlPlaneStorageListQueries::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldPageToken >> TYdbControlPlaneStorageModifyQuery::ShouldModifyRunningQuery [GOOD] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> TYdbControlPlaneStorageModifyQuery::ShouldValidate |82.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TYdbControlPlaneStorageDeleteQuery::ShouldProhibitDeletionOfRunningQuery [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDeleteBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-09-25T16:20:44.227325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:44.232172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:44.232224Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:44.233057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:44.233111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:44.233149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:44.233173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:44.233192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:44.233212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:44.233233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:44.233251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:44.233272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:44.233291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.233313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2025-09-25T16:20:44.233333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:44.233379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:44.240127Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:44.240209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:44.240222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:44.240274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.240316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:44.240331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:44.240338Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:44.240351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:44.240362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:44.240372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:44.240378Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:44.240399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:44.240408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:44.240418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:44.240423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:44.240436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:44.240444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:44.240454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:44.240459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:44.240469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:44.240478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:44.240483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:44.240493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:44.240504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:44.240509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:44.240539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:44.240550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:44.240555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:44.240572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:44.240581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.240587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:44.240596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:44.240604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:44.240610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:44.240619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:44.240630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:44.240636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:44.240654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_ ... TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-09-25T16:21:50.541504Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:222;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-09-25T16:21:50.541510Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:52;event=build_next_interval; 2025-09-25T16:21:50.541603Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:50.541631Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541637Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-09-25T16:21:50.541652Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:253;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-09-25T16:21:50.541667Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:274;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-09-25T16:21:50.541723Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[54:456:2468];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-09-25T16:21:50.541742Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:296;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541760Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541773Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541809Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:120;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-09-25T16:21:50.541825Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:211;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541840Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:216;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541846Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:442: Scan [54:457:2469] finished for tablet 9437184 2025-09-25T16:21:50.541915Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:448;event=scan_finish;compute_actor_id=[54:456:2468];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":66420665,"name":"_full_task","f":66420665,"d_finished":0,"c":0,"l":66422415,"d":1750},"events":[{"name":"bootstrap","f":66420728,"d_finished":199,"c":1,"l":66420927,"d":199},{"a":66422366,"name":"ack","f":66422159,"d_finished":178,"c":1,"l":66422337,"d":227},{"a":66422364,"name":"processing","f":66420965,"d_finished":425,"c":3,"l":66422337,"d":476},{"name":"ProduceResults","f":66420860,"d_finished":296,"c":6,"l":66422403,"d":296},{"a":66422403,"name":"Finish","f":66422403,"d_finished":0,"c":0,"l":66422415,"d":12},{"name":"task_result","f":66420969,"d_finished":234,"c":2,"l":66422073,"d":234}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.541927Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:392;event=send_data;compute_actor_id=[54:456:2468];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-09-25T16:21:50.541972Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:397;event=scan_finished;compute_actor_id=[54:456:2468];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":66420665,"name":"_full_task","f":66420665,"d_finished":0,"c":0,"l":66422493,"d":1828},"events":[{"name":"bootstrap","f":66420728,"d_finished":199,"c":1,"l":66420927,"d":199},{"a":66422366,"name":"ack","f":66422159,"d_finished":178,"c":1,"l":66422337,"d":305},{"a":66422364,"name":"processing","f":66420965,"d_finished":425,"c":3,"l":66422337,"d":554},{"name":"ProduceResults","f":66420860,"d_finished":296,"c":6,"l":66422403,"d":296},{"a":66422403,"name":"Finish","f":66422403,"d_finished":0,"c":0,"l":66422493,"d":90},{"name":"task_result","f":66420969,"d_finished":234,"c":2,"l":66422073,"d":234}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-09-25T16:21:50.582554Z node 54 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-09-25T16:21:50.539933Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-09-25T16:21:50.582578Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-09-25T16:21:50.582634Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> TYdbControlPlaneStorageModifyQuery::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser >> TYdbControlPlaneStorageModifyBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob >> TYdbControlPlaneStorageControlQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStoragePipeline::ShouldCheckClearFields [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck >> TYdbControlPlaneStorageCreateBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting >> TYdbControlPlaneStorageGetQueryStatus::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeIncorrectJob [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListBindings::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldValidate >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TYdbControlPlaneStoragePipeline::ShouldCheckNodesHealthCheck [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta >> 
TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageModifyQuery::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStorageGetQueryStatus::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty >> CommitOffset::Commit_WithoutSession_TopPast >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess >> TYdbControlPlaneStorageCreateQuery::ShouldCheckDescribeJobIncorrectVisibility [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery >> TYdbControlPlaneStorageListConnections::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic >> TYdbControlPlaneStorageCreateBinding::ShouldValidateFormatSetting [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageListBindings::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStoragePipeline::ShouldSkipBindingIfDisabledConnection [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers >> TYdbControlPlaneStorageCreateQuery::ShouldSaveQuery [GOOD] >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyQuery::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess >> TYdbControlPlaneStorageModifyBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListQueries::ShouldPageToken [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken >> TYdbControlPlaneStorageCreateQuery::ShouldCheckQueryName [GOOD] >> 
TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections >> TYdbControlPlaneStorageDeleteConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeSuccess [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicSuccess [GOOD] >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetMeta [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> TYdbControlPlaneStoragePipeline::ShouldSaveTopicConsumers [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionQueryInvokeFailed [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] >> TYdbControlPlaneStorageDescribeConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivateAfterModify [GOOD] >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDeleteQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> TYdbControlPlaneStorageCreateBindingPermissions::ShouldApplyPermissionManagePublicFailed [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser >> TYdbControlPlaneStoragePipeline::ShouldSaveDqGraphs [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas 
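Note on reading the scan records above: the TX_COLUMNSHARD_SCAN scan_finish/scan_finished lines earlier in this output embed their per-phase timings as a stats={...} JSON blob (phases such as bootstrap, ack, processing, ProduceResults, Finish, task_result). When digging through logs like this it can help to pull that blob out programmatically. The snippet below is a minimal sketch and not part of the test output; it assumes the blob is plain JSON and that the "c"/"d" fields are per-phase call counts and accumulated durations (their exact units are not stated in the log). All function and variable names are illustrative.

    # Hypothetical helper for TX_COLUMNSHARD_SCAN records like the ones in this log.
    # Assumption: stats={...} is a balanced, plain-JSON blob embedded in the record.
    import json

    def extract_scan_stats(record: str):
        """Return the parsed stats JSON from one scan log record, or None if absent."""
        start = record.find("stats={")
        if start == -1:
            return None
        i = record.index("{", start)
        depth = 0
        for j in range(i, len(record)):
            if record[j] == "{":
                depth += 1
            elif record[j] == "}":
                depth -= 1
                if depth == 0:
                    # Slice out the balanced {...} blob and parse it.
                    return json.loads(record[i:j + 1])
        return None

    def summarize_scan_record(record: str) -> None:
        stats = extract_scan_stats(record)
        if not stats:
            return
        # "full" carries the overall task span; "events" the per-phase entries.
        print("total d =", stats.get("full", {}).get("d"))
        for ev in stats.get("events", []):
            print(f'{ev.get("name")}: c={ev.get("c")} d={ev.get("d")}')

    # Usage: pass one raw log line, e.g. the scan_finished record above,
    # to print bootstrap/ack/processing/ProduceResults/Finish/task_result timings.

Parsing the blob rather than eyeballing it makes it easier to compare the two nearly identical stats dumps emitted at scan_finish and scan_finished.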
|82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TYdbControlPlaneStorageCreateConnection::ShouldCheckNotAvailable [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDescribeBinding::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageGetQueryStatusPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 12396 ? Ss 15:23 0:02 /sbin/init root 2 0.0 0.0 0 0 ? S 15:23 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 15:23 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 15:23 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/0:0H-events_highpri] root 9 0.7 0.0 0 0 ? I 15:23 0:24 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 15:23 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/0] root 15 0.3 0.0 0 0 ? I 15:23 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 15:23 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 15:23 0:02 [migration/1] root 23 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 15:23 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 15:23 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 15:23 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 15:23 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? 
S 15:23 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 15:23 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/6] root 54 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/6:0-rcu_par_gp] root 55 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 15:23 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/7] root 60 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/7:0-dio/vda1] root 61 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 15:23 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/8:0H-kblockd] root 68 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 15:23 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/9] root 72 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/9:0-rcu_gp] root 73 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 15:23 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 15:23 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/11] root 84 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/11:0-rcu_par_gp] root 85 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 15:23 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/12:0-cgroup_destroy] root 91 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 15:23 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/13] root 96 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/13:0-rcu_gp] root 97 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 15:23 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 15:23 0:02 [migration/15] root 107 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 15:23 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/16:0-rcu_gp] root 115 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 15:23 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? 
S 15:23 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 15:23 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 15:23 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 15:23 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/20] root 138 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/20:0-rcu_gp] root 139 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 15:23 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 15:23 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 15:23 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 ... PlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-09-25T16:21:59.692938Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:21:59.692939Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:21:59.693022Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-09-25T16:21:59.693024Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:21:59.693025Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:21:59.693086Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks". 
Create session OK 2025-09-25T16:21:59.693089Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:21:59.693090Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:21:59.693110Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-09-25T16:21:59.693112Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:21:59.693113Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:21:59.724548Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)" 2025-09-25T16:21:59.724564Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)": 2025-09-25T16:21:59.792490Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:155: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-09-25T16:21:59.792507Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:122: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-09-25T16:21:59.873078Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:21:59.873099Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-09-25T16:21:59.874728Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:21:59.874735Z node 17 
:YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-09-25T16:21:59.875620Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:21:59.875625Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/mappings": 2025-09-25T16:21:59.877852Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:21:59.877861Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/jobs": 2025-09-25T16:21:59.877996Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:21:59.877999Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/bindings": 2025-09-25T16:21:59.878601Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:21:59.878604Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/nodes": 2025-09-25T16:21:59.878721Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:21:59.878724Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/tenants": 2025-09-25T16:21:59.882814Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-09-25T16:21:59.882825Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-09-25T16:21:59.882995Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:21:59.882997Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/connections": 2025-09-25T16:21:59.883077Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:21:59.883079Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-09-25T16:21:59.883133Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:21:59.883135Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/queries": 2025-09-25T16:21:59.883194Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:21:59.883195Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/quotas": 2025-09-25T16:21:59.889850Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:21:59.889862Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageGetQueryStatusPermissions::TTestCaseShouldApplyPermissionViewPrivatePublic::Execute_(NUnitTest::TTestContext&)/idempotency_keys": |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 
2025-09-25T16:20:59.948777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:59.953953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:59.954009Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:59.954840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:59.954897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:59.954934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:59.954960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:59.954980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:59.955000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:59.955020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:59.955039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:59.955059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:59.955079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.955099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:59.955119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:59.955163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:59.961784Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:59.961854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:59.961867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:59.961910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.961946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:59.961960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:59.961966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:59.961977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:59.961986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:59.961994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:59.961999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:59.962018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:59.962027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:59.962035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:59.962039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:59.962050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:59.962058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:59.962066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:59.962071Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:59.962080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:59.962088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:59.962092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:59.962102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:59.962110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:59.962115Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:59.962141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:59.962150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:59.962155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:59.962169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:59.962177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.962182Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:59.962189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:59.962197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:59.962202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:59.962209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:59.962217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:59.962223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:59.962238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:59.962246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... TE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=46656;count=168;size_of_portion=184; 2025-09-25T16:22:04.120136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9920; 2025-09-25T16:22:04.120167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=20; 2025-09-25T16:22:04.120299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=122; 2025-09-25T16:22:04.120306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10129; 2025-09-25T16:22:04.120314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10148; 2025-09-25T16:22:04.120323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:22:04.120346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18; 2025-09-25T16:22:04.120353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10298; 2025-09-25T16:22:04.120403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=42; 2025-09-25T16:22:04.120430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=20; 2025-09-25T16:22:04.120464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=27; 2025-09-25T16:22:04.120484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-09-25T16:22:04.122232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1730; 2025-09-25T16:22:04.124263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1998; 2025-09-25T16:22:04.124287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-09-25T16:22:04.124294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:22:04.124302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:22:04.124320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=12; 2025-09-25T16:22:04.124328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:22:04.124353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=19; 2025-09-25T16:22:04.124362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=2; 2025-09-25T16:22:04.124379Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=11; 2025-09-25T16:22:04.124400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-09-25T16:22:04.124418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-09-25T16:22:04.124459Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=32684; 2025-09-25T16:22:04.124512Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4825976;raw_bytes=7660626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100602608;raw_bytes=184175560;count=5;records=1780000} inactive {blob_bytes=426237216;raw_bytes=751149332;count=36;records=7540000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:22:04.124563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:22:04.124577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:22:04.124597Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:22:04.124606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:22:04.124645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:04.124674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:04.124684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:04.124706Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815465430;tx_id=18446744073709551615;;current_snapshot_ts=1758817261068; 2025-09-25T16:22:04.124716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:04.124729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:04.124735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:04.124764Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:22:04.129771Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:22:04.130002Z 
node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:22:04.130013Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:22:04.130022Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:22:04.130031Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:04.130070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:04.130080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:04.130097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815465430;tx_id=18446744073709551615;;current_snapshot_ts=1758817261068; 2025-09-25T16:22:04.130108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:04.130120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:04.130126Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:04.130150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:22:04.130160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> 
TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStoragePipeline::ShouldCheckRemovingOldResultSet [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TYdbControlPlaneStoragePipeline::ShouldSaveResultSetMetas [GOOD] >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePublic [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageCreateQuery::ShouldCheckAvailableConnections [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 12396 ? Ss 15:23 0:02 /sbin/init root 2 0.0 0.0 0 0 ? S 15:23 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 15:23 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 15:23 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/0:0H-events_highpri] root 9 0.7 0.0 0 0 ? I 15:23 0:24 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 15:23 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/0] root 15 0.3 0.0 0 0 ? I 15:23 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 15:23 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 15:23 0:02 [migration/1] root 23 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 15:23 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? 
S 15:23 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 15:23 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 15:23 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 15:23 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/6] root 54 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/6:0-rcu_par_gp] root 55 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 15:23 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/7] root 60 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/7:0-dio/vda1] root 61 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 15:23 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/8:0H-kblockd] root 68 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 15:23 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/9] root 72 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/9:0-rcu_gp] root 73 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 15:23 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 15:23 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/11] root 84 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/11:0-rcu_par_gp] root 85 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 15:23 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/12:0-cgroup_destroy] root 91 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 15:23 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/13] root 96 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/13:0-rcu_gp] root 97 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 15:23 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 15:23 0:02 [migration/15] root 107 0.0 0.0 0 0 ? 
S 15:23 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 15:23 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/16:0-rcu_gp] root 115 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 15:23 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 15:23 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 15:23 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 15:23 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/20] root 138 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/20:0-rcu_gp] root 139 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 15:23 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 15:23 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 15:23 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 ... 5T16:22:00.025919Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-09-25T16:22:00.025923Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:00.025925Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:00.028318Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs". 
Create session OK 2025-09-25T16:22:00.028329Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:00.028332Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:00.028609Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings". Create session OK 2025-09-25T16:22:00.028612Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:00.028613Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:00.028733Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections". Create session OK 2025-09-25T16:22:00.028735Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:00.028736Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:00.028848Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas". 
Create session OK 2025-09-25T16:22:00.028850Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:22:00.028851Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:22:00.065137Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)" 2025-09-25T16:22:00.065151Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)": 2025-09-25T16:22:00.117677Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:155: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-09-25T16:22:00.117689Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:122: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-09-25T16:22:00.126285Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:00.126300Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/queries": 2025-09-25T16:22:00.161127Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:00.161141Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/jobs": 2025-09-25T16:22:00.161837Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:22:00.161845Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/quotas": 2025-09-25T16:22:00.161984Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:22:00.161986Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply 
for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/nodes": 2025-09-25T16:22:00.162709Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:00.162715Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/connections": 2025-09-25T16:22:00.162840Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:00.162843Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/bindings": 2025-09-25T16:22:00.162928Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:22:00.162931Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenants": 2025-09-25T16:22:00.162995Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:00.162997Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-09-25T16:22:00.163063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:22:00.163066Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/mappings": 2025-09-25T16:22:00.163127Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-09-25T16:22:00.163129Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-09-25T16:22:00.163189Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:22:00.163191Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-09-25T16:22:00.163257Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:22:00.163258Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-09-25T16:22:00.163958Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:22:00.163966Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageCreateQuery::TTestCaseShouldCheckAvailableConnections::Execute_(NUnitTest::TTestContext&)/pending_small": >> TYdbControlPlaneStorageQuotas::GetDefaultQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::OverrideQuotas >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldValidate >> TYdbControlPlaneStorageListQueries::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldValidate |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest >> TYdbControlPlaneStorageModifyBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxCountConnections [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivate [GOOD] >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest >> TYdbControlPlaneStorageQuotas::OverrideQuotas [GOOD] >> TYdbControlPlaneStorageQuotas::GetStaleUsage >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-09-25T16:20:49.311939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:49.316531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:49.316587Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:49.317445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:49.317504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:49.317545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:49.317570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:49.317591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:49.317612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:49.317632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:49.317653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:49.317673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:49.317686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.317700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:49.317712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 
2025-09-25T16:20:49.317743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:49.322735Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:49.322802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:49.322810Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:49.322843Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.322875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:49.322886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:49.322891Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:49.322901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:49.322911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:49.322919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:49.322925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:49.322946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:49.322955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:49.322964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:49.322969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:49.322982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:49.322990Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:49.323008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:49.323013Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:49.323022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:49.323031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:49.323036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:49.323046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:49.323055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:49.323060Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:49.323090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:49.323101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:49.323107Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:49.323123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:49.323132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.323136Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:49.323144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:49.323152Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:49.323156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:49.323164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:49.323173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:49.323178Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:49.323195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:49.323204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... UTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=46656;count=168;size_of_portion=184; 2025-09-25T16:22:07.482967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=22164; 2025-09-25T16:22:07.482981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=3; 2025-09-25T16:22:07.483082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=94; 2025-09-25T16:22:07.483088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=22308; 2025-09-25T16:22:07.483095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=22323; 2025-09-25T16:22:07.483104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:22:07.483128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=17; 2025-09-25T16:22:07.483134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=22431; 2025-09-25T16:22:07.483172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=30; 2025-09-25T16:22:07.483193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2025-09-25T16:22:07.483223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=23; 2025-09-25T16:22:07.483242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-09-25T16:22:07.499782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16519; 2025-09-25T16:22:07.502004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2178; 2025-09-25T16:22:07.502036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-09-25T16:22:07.502045Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-09-25T16:22:07.502053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:22:07.502069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-09-25T16:22:07.502077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:22:07.502096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-09-25T16:22:07.502104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:22:07.502119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=9; 2025-09-25T16:22:07.502139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-09-25T16:22:07.502157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-09-25T16:22:07.502163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=52956; 2025-09-25T16:22:07.502215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4825976;raw_bytes=7660626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100602600;raw_bytes=184175560;count=5;records=1780000} inactive {blob_bytes=426237224;raw_bytes=751149332;count=36;records=7540000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:22:07.502255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:22:07.502268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:22:07.502287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:22:07.502299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:22:07.502335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:07.502362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:07.502374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:07.502391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815454792;tx_id=18446744073709551615;;current_snapshot_ts=1758817250431; 2025-09-25T16:22:07.502400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:07.502413Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:07.502419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:07.502447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:22:07.512484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:22:07.512795Z 
node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:22:07.512805Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:22:07.512809Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:22:07.512817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:07.512872Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:07.512881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:07.512897Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815454792;tx_id=18446744073709551615;;current_snapshot_ts=1758817250431; 2025-09-25T16:22:07.512907Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:07.512920Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:07.512926Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:07.512951Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:22:07.512962Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5709:7671];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TYdbControlPlaneStorageControlQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> 
TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageQuotas::GetStaleUsage [GOOD] >> TYdbControlPlaneStorageQuotas::PushUsageUpdate >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist >> TYdbControlPlaneStorageCreateConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName >> TYdbControlPlaneStorageListConnections::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldValidate >> TYdbControlPlaneStoragePipeline::ShouldCheckPrioritySelectionEntities [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TSequence::CreateSequence >> TYdbControlPlaneStorageGetResultDataPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldValidate >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageQuotas::PushUsageUpdate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate >> TopicTimestamp::TimestampRead_1MB_Topic_exact [GOOD] >> TopicTimestamp::TimestampRead_1MB_Topic_offset+middle >> CommitOffset::Commit_WithoutSession_TopPast [GOOD] >> CommitOffset::Commit_WithWrongSession_ToParent >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TYdbControlPlaneStorageModifyConnection::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TYdbControlPlaneStorageModifyQueryPermissions::ShouldApplyPermissionManagePrivatePublic [GOOD] >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate >> TYdbControlPlaneStorageRateLimiter::ShouldValidateCreate [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete >> TYdbControlPlaneStorageDescribeJobPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission >> TYdbControlPlaneStorageDescribeBinding::ShouldValidate [GOOD] >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser >> TYdbControlPlaneStorageListQueries::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterName >> TYdbControlPlaneStorageGetTask::ShouldValidate [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret_reboots/unittest |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TYdbControlPlaneStorageNodesHealthCheck::ShouldValidate 
[GOOD] >> TYdbControlPlaneStoragePingTask::ShouldValidate >> TSequence::CreateSequenceParallel >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldValidateDelete [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist >> TYdbControlPlaneStorageCreateConnection::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> TKeyValueTest::TestVacuumOnEmptyTablet >> TYdbControlPlaneStorageCreateBinding::ShouldCheckLowerCaseName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageGetTask::ShouldWorkWithEmptyPending [GOOD] >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks >> TYdbControlPlaneStoragePingTask::ShouldValidate [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TYdbControlPlaneStoragePipeline::ShouldCheckResultSetLimit [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest >> TYdbControlPlaneStorageListConnections::ShouldValidate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TYdbControlPlaneStorageModifyConnection::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TYdbControlPlaneStorageDescribeBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-09-25T16:20:45.487785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 2025-09-25T16:20:45.492697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:45.492746Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:45.493396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-09-25T16:20:45.493441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:45.493469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:45.493483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:45.493496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:45.493511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:45.493525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:45.493538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:45.493552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:45.493564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.493577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:45.493591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:45.493621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:45.498759Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:45.498814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:45.498822Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-09-25T16:20:45.498855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.498886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:45.498896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:45.498900Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:45.498908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:45.498914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:45.498920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:45.498924Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:45.498939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:45.498944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:45.498950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:45.498953Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:45.498962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:45.498966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:45.498972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:45.498975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:45.498982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:45.498988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:45.498991Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:45.498997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:45.499003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:45.499006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:45.499026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:45.499032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:45.499035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:45.499045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:45.499051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.499055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:45.499060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:45.499065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:45.499069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:45.499075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:45.499083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:45.499089Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:45.499106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:45.499114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=269;sum=46648;count=168;size_of_portion=184; 2025-09-25T16:22:13.287180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1384; 2025-09-25T16:22:13.287187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-09-25T16:22:13.287264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2025-09-25T16:22:13.287269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1497; 2025-09-25T16:22:13.287275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1512; 2025-09-25T16:22:13.287282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-09-25T16:22:13.287305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2025-09-25T16:22:13.287310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1639; 2025-09-25T16:22:13.287335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=18; 2025-09-25T16:22:13.287354Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2025-09-25T16:22:13.287384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=23; 2025-09-25T16:22:13.287405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=14; 2025-09-25T16:22:13.287761Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=349; 2025-09-25T16:22:13.288110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=343; 2025-09-25T16:22:13.288119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-09-25T16:22:13.288126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:22:13.288133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:22:13.288146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-09-25T16:22:13.288154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:22:13.288169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=9; 2025-09-25T16:22:13.288176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:22:13.288189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-09-25T16:22:13.288204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=7; 2025-09-25T16:22:13.288235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=10; 2025-09-25T16:22:13.288241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12281; 2025-09-25T16:22:13.288277Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=5508872;raw_bytes=7830634;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=114929968;raw_bytes=184829450;count=5;records=1780000} inactive {blob_bytes=449763544;raw_bytes=695770898;count=36;records=6977500} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:22:13.288307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:22:13.288316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:22:13.288331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:22:13.288338Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:22:13.288362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:13.288384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:13.288392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:13.288406Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815448182;tx_id=18446744073709551615;;current_snapshot_ts=1758817246536; 2025-09-25T16:22:13.288415Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:13.288427Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:13.288433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:13.288456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:22:13.301647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:22:13.301726Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:22:13.301733Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 
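The `Index: tables 1 inserted {...} compacted {...} s-compacted {...} inactive {...} evicted {...}` record a few lines above packs per-bucket counters into brace-delimited groups. A small sketch that unpacks those buckets and derives a blob/raw size ratio per bucket; the bucket and counter names are copied verbatim from that record, while the function name and output formatting are illustrative:

```python
import re

# The record as it appears in the log above, reassembled onto one line.
INDEX_LINE = (
    "Index: tables 1 inserted {blob_bytes=5508872;raw_bytes=7830634;count=1;records=85000} "
    "compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} "
    "s-compacted {blob_bytes=114929968;raw_bytes=184829450;count=5;records=1780000} "
    "inactive {blob_bytes=449763544;raw_bytes=695770898;count=36;records=6977500} "
    "evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184"
)

BUCKET_RE = re.compile(r"([\w-]+) \{([^}]*)\}")

def index_buckets(line: str) -> dict:
    """Return {bucket_name: {counter: int}} for an 'Index: ...' record."""
    buckets = {}
    for name, body in BUCKET_RE.findall(line):
        buckets[name] = {k: int(v) for k, v in
                         (pair.split("=") for pair in body.split(";") if pair)}
    return buckets

for name, counters in index_buckets(INDEX_LINE).items():
    raw = counters["raw_bytes"]
    ratio = counters["blob_bytes"] / raw if raw else float("nan")
    print(f"{name:12s} portions={counters['count']:3d} "
          f"records={counters['records']:8d} blob/raw={ratio:.2f}")
```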
2025-09-25T16:22:13.301739Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:22:13.301747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:13.301783Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:13.301792Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:13.301808Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815448182;tx_id=18446744073709551615;;current_snapshot_ts=1758817246536; 2025-09-25T16:22:13.301819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:13.301833Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:13.301841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:13.301866Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-09-25T16:22:13.301877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> TYdbControlPlaneStorageRateLimiter::ShouldCreateRateLimiterResource [GOOD] >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> TYdbControlPlaneStorageListBindingsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TKesusTest::TestKesusConfig |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.0%| 
[LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckLowerCaseName [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TYdbControlPlaneStorageModifyConnection::ShouldValidate [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckGetResultDataRequest [GOOD] >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery >> TYdbControlPlaneStorageRateLimiter::ShouldDeleteRateLimiterResource [GOOD] >> TYdbControlPlaneStorageTest::ShouldCreateTable >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> TYdbControlPlaneStorageGetTask::ShouldBatchingGetTasks [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken >> TYdbControlPlaneStorageListQueries::ShouldFilterName [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TYdbControlPlaneStoragePipeline::ShouldCheckAbortInTerminatedState [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAst >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-09-25T16:22:12.131353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:22:12.131387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:22:12.131393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:22:12.131399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:22:12.131406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2025-09-25T16:22:12.131411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:22:12.131422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:22:12.131438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:22:12.131557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:22:12.131629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:22:12.156364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:22:12.156395Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:22:12.160624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:22:12.160668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:22:12.160717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:22:12.163090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:22:12.163163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:22:12.163276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:12.163377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:22:12.170623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:22:12.170711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:22:12.171083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:22:12.171099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:22:12.171134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:22:12.171146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:22:12.171153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:22:12.171204Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.174460Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-09-25T16:22:12.222923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:22:12.223032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.223107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:22:12.223116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:22:12.223160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:22:12.223177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:22:12.233307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:12.233398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:22:12.233480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.233496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:22:12.233502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:22:12.233508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:22:12.241311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.241351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:22:12.241362Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:22:12.245463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.245497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:12.245507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:12.245519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:22:12.246470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:22:12.252988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:22:12.253105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:22:12.253409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:12.253461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:22:12.253483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:12.253584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:22:12.253594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:12.253651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:22:12.253668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:22:12.254633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
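The FLAT_TX_SCHEMESHARD output in this section traces each sub-operation through numeric states (`Change state for txid 1:0 2 -> 3`, `3 -> 128`, `128 -> 240`, and so on). A sketch that reconstructs those per-operation chains from a saved stderr dump and flags any gap in the sequence; it relies only on the `Change state for txid` wording shown here, and the continuity check is an assumption about how consecutive transitions should read, not a documented schemeshard invariant:

```python
import re
from collections import defaultdict

# Matches records like:
#   schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3
STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

def state_transitions(log_text: str) -> dict:
    """Collect the ordered list of (src, dst) state changes per operation id (txid:part)."""
    chains = defaultdict(list)
    for op_id, src, dst in STATE_RE.findall(log_text):
        chains[op_id].append((int(src), int(dst)))
    return chains

def check_continuity(chains: dict):
    """Print any operation whose next transition does not start where the previous one ended."""
    for op_id, steps in chains.items():
        for (_, prev_dst), (src, _) in zip(steps, steps[1:]):
            if src != prev_dst:
                print(f"{op_id}: gap {prev_dst} -> ... -> {src}")

# Usage (path is hypothetical):
#   chains = state_transitions(open("test_stderr.log").read())
#   check_continuity(chains)
```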
2025-09-25T16:22:12.254648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 3409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 317 } } CommitVersion { Step: 5000014 TxId: 114 } 2025-09-25T16:22:16.861871Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:22:16.862048Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877761, Sender [7:1052:2987], Recipient [7:129:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-09-25T16:22:16.862057Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5322: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-09-25T16:22:16.862065Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6142: Pipe server connected, at tablet: 72057594046678944 2025-09-25T16:22:16.862101Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269551620, Sender [7:989:2933], Recipient [7:129:2154]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 989 RawX2: 30064774005 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-09-25T16:22:16.862107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5257: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-09-25T16:22:16.862116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 989 RawX2: 30064774005 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-09-25T16:22:16.862121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-09-25T16:22:16.862136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 989 RawX2: 30064774005 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-09-25T16:22:16.862143Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-09-25T16:22:16.862152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 989 RawX2: 30064774005 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-09-25T16:22:16.862164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:16.862169Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-09-25T16:22:16.862175Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 
72075186233409549, at schemeshard: 72057594046678944 2025-09-25T16:22:16.862181Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 114:0 129 -> 240 2025-09-25T16:22:16.862207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:22:16.862794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.862909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-09-25T16:22:16.862916Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.863383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-09-25T16:22:16.863394Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.863420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-09-25T16:22:16.863427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.863449Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-09-25T16:22:16.863453Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.863458Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2025-09-25T16:22:16.863475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:989:2933] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-09-25T16:22:16.863536Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435072, Sender [7:129:2154], Recipient [7:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-09-25T16:22:16.863542Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-09-25T16:22:16.863550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-09-25T16:22:16.863558Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2025-09-25T16:22:16.863571Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:22:16.863575Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#114:0 progress is 1/1 2025-09-25T16:22:16.863580Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-09-25T16:22:16.863585Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:926: Part operation is done id#114:0 progress is 1/1 2025-09-25T16:22:16.863589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-09-25T16:22:16.863594Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-09-25T16:22:16.863605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:392:2359] message: TxId: 114 2025-09-25T16:22:16.863612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-09-25T16:22:16.863618Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 114:0 2025-09-25T16:22:16.863623Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 114:0 2025-09-25T16:22:16.863649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-09-25T16:22:16.864045Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:16.864066Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:392:2359] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-09-25T16:22:16.864123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-09-25T16:22:16.864132Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1020:2956] 2025-09-25T16:22:16.864181Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [7:1022:2958], Recipient [7:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:22:16.864188Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:22:16.864193Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-09-25T16:22:16.864440Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122432, Sender [7:1061:2996], Recipient [7:129:2154]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-09-25T16:22:16.864446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5236: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-09-25T16:22:16.865263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:22:16.865321Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:507: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 
72057594046678944 2025-09-25T16:22:16.865400Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-09-25T16:22:16.865454Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:22:16.866092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:22:16.866139Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-09-25T16:22:16.866147Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 >> TYdbControlPlaneStorageTest::ShouldCreateTable [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName >> TYdbControlPlaneStorageCreateBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess >> TYdbControlPlaneStorageCreateConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMaxLengthName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-09-25T16:22:13.789381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:22:13.789411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:22:13.789418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:22:13.789423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 
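Interleaved with the tablet logs are the test-runner markers of the form `>> Suite::Test [GOOD]`; a bare `>> Suite::Test` without a bracketed status means the test has only been announced so far. A sketch that tallies those markers from a saved run log; the regex and the assumption that every final status is an upper-case word in brackets are mine, since only `[GOOD]` is actually visible in this excerpt:

```python
import re
from collections import Counter

# Result markers look like:  >> TSuite::TestName [GOOD]
RESULT_RE = re.compile(r">> (\S+)(?: \[([A-Z]+)\])?")

def summarize(log_text: str):
    statuses = {}                      # last status seen per test wins
    for name, status in RESULT_RE.findall(log_text):
        statuses[name] = status or statuses.get(name, "")
    counts = Counter(s for s in statuses.values() if s)
    pending = sorted(n for n, s in statuses.items() if not s)
    return counts, pending

# Usage (path is hypothetical):
#   counts, pending = summarize(open("run.log").read())
#   print(counts)                                   # e.g. Counter({'GOOD': 27})
#   print(len(pending), "tests announced without a final status yet")
```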
2025-09-25T16:22:13.789429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:22:13.789434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:22:13.789443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:22:13.789456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:22:13.789574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:22:13.789652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:22:13.808799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7729: Cannot subscribe to console configs 2025-09-25T16:22:13.808919Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:22:13.817077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:22:13.817217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:22:13.817264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:22:13.819301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:22:13.819378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:22:13.819503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:13.819603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:22:13.820081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:22:13.820138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:22:13.820429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:22:13.820443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:22:13.820469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:22:13.820479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:22:13.820487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:22:13.820525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.822188Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-09-25T16:22:13.847468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:22:13.847576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.847642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:22:13.847651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:22:13.847702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:22:13.847717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:22:13.851689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:13.851767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:22:13.851830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.851844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:22:13.851850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:22:13.851857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:22:13.852672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.852690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:22:13.852697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:22:13.853399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.853416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:22:13.853423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:13.853430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:22:13.854414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:22:13.854904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:22:13.854961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:22:13.855205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:13.855237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-09-25T16:22:13.855255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:13.855348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-09-25T16:22:13.855358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:22:13.855391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-09-25T16:22:13.855403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-09-25T16:22:13.855935Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:22:13.855946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 3/4 2025-09-25T16:22:17.679959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-09-25T16:22:17.679963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-09-25T16:22:17.680115Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.680121Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-09-25T16:22:17.680129Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:346:2325] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-09-25T16:22:17.680195Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435072, Sender [7:129:2154], Recipient [7:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-09-25T16:22:17.680200Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-09-25T16:22:17.680205Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:22:17.680209Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:22:17.680258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-09-25T16:22:17.680279Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-09-25T16:22:17.680283Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-09-25T16:22:17.680286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:22:17.680291Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-09-25T16:22:17.680295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:22:17.680299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-09-25T16:22:17.680310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:421:2378] message: TxId: 102 2025-09-25T16:22:17.680319Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-09-25T16:22:17.680326Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the 
parts is done, operation id: 102:0 2025-09-25T16:22:17.680330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:22:17.680355Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:22:17.680360Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-09-25T16:22:17.680364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:1 2025-09-25T16:22:17.680368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-09-25T16:22:17.680372Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-09-25T16:22:17.680375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:2 2025-09-25T16:22:17.680383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-09-25T16:22:17.680388Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-09-25T16:22:17.680391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:3 2025-09-25T16:22:17.680400Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-09-25T16:22:17.680478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.680483Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.680520Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 2146435084, Sender [7:129:2154], Recipient [7:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:22:17.680525Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5414: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-09-25T16:22:17.680532Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-09-25T16:22:17.680537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-09-25T16:22:17.680549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:22:17.680610Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.680614Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2025-09-25T16:22:17.680883Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.680900Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.680912Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.680915Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.680921Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.680924Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.681413Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:17.681427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.681447Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.681462Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-09-25T16:22:17.681478Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:421:2378] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-09-25T16:22:17.681511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:22:17.681517Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:527:2476] 2025-09-25T16:22:17.681537Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-09-25T16:22:17.681580Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 269877764, Sender [7:529:2478], Recipient [7:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:22:17.681587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-09-25T16:22:17.681591Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6190: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-09-25T16:22:17.681687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5225: StateWork, received event# 271122945, Sender [7:604:2553], Recipient [7:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-09-25T16:22:17.681695Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-09-25T16:22:17.681711Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-09-25T16:22:17.681780Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 62us result status StatusPathDoesNotExist 2025-09-25T16:22:17.681828Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TYdbControlPlaneStorageWriteResultData::ShouldValidateWrite [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead >> TInterconnectTest::TestNotifyUndelivered >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageDescribeQuery::ShouldCheckExist [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 12396 ? Ss 15:23 0:02 /sbin/init root 2 0.0 0.0 0 0 ? S 15:23 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_par_gp] root 5 0.0 0.0 0 0 ? I< 15:23 0:00 [slub_flushwq] root 6 0.0 0.0 0 0 ? I< 15:23 0:00 [netns] root 8 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/0:0H-events_highpri] root 9 0.7 0.0 0 0 ? I 15:23 0:24 [kworker/u128:0-ext4-rsv-conversion] root 11 0.0 0.0 0 0 ? I< 15:23 0:00 [mm_percpu_wq] root 12 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_rude_] root 13 0.0 0.0 0 0 ? S 15:23 0:00 [rcu_tasks_trace] root 14 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/0] root 15 0.3 0.0 0 0 ? I 15:23 0:11 [rcu_sched] root 16 0.0 0.0 0 0 ? S 15:23 0:00 [migration/0] root 17 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/0] root 18 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/0:1-rcu_par_gp] root 19 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/0] root 20 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/1] root 21 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/1] root 22 0.0 0.0 0 0 ? S 15:23 0:02 [migration/1] root 23 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/1] root 25 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/1:0H-events_highpri] root 26 0.0 0.0 0 0 ? 
S 15:23 0:00 [cpuhp/2] root 27 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/2] root 28 0.0 0.0 0 0 ? S 15:23 0:02 [migration/2] root 29 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/2] root 31 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/2:0H-events_highpri] root 32 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/3] root 33 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/3] root 34 0.0 0.0 0 0 ? S 15:23 0:02 [migration/3] root 35 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/3] root 37 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/3:0H-events_highpri] root 38 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/4] root 39 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/4] root 40 0.0 0.0 0 0 ? S 15:23 0:02 [migration/4] root 41 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/4] root 43 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/4:0H-events_highpri] root 44 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/5] root 45 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/5] root 46 0.0 0.0 0 0 ? S 15:23 0:02 [migration/5] root 47 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/5] root 49 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/5:0H-events_highpri] root 50 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/6] root 51 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/6] root 52 0.0 0.0 0 0 ? S 15:23 0:02 [migration/6] root 53 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/6] root 54 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/6:0-rcu_par_gp] root 55 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/6:0H-events_highpri] root 56 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/7] root 57 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/7] root 58 0.0 0.0 0 0 ? S 15:23 0:02 [migration/7] root 59 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/7] root 60 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/7:0-dio/vda1] root 61 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/7:0H-events_highpri] root 62 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/8] root 63 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/8] root 64 0.0 0.0 0 0 ? S 15:23 0:02 [migration/8] root 65 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/8] root 67 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/8:0H-kblockd] root 68 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/9] root 69 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/9] root 70 0.0 0.0 0 0 ? S 15:23 0:02 [migration/9] root 71 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/9] root 72 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/9:0-rcu_gp] root 73 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/9:0H-events_highpri] root 74 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/10] root 75 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/10] root 76 0.0 0.0 0 0 ? S 15:23 0:02 [migration/10] root 77 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/10] root 79 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/10:0H-events_highpri] root 80 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/11] root 81 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/11] root 82 0.0 0.0 0 0 ? S 15:23 0:02 [migration/11] root 83 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/11] root 84 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/11:0-rcu_par_gp] root 85 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/11:0H-events_highpri] root 86 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/12] root 87 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/12] root 88 0.0 0.0 0 0 ? S 15:23 0:02 [migration/12] root 89 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/12] root 90 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/12:0-cgroup_destroy] root 91 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/12:0H-events_highpri] root 92 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/13] root 93 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/13] root 94 0.0 0.0 0 0 ? S 15:23 0:02 [migration/13] root 95 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/13] root 96 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/13:0-rcu_gp] root 97 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/13:0H-events_highpri] root 98 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/14] root 99 0.0 0.0 0 0 ? 
S 15:23 0:00 [idle_inject/14] root 100 0.0 0.0 0 0 ? S 15:23 0:02 [migration/14] root 101 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/14] root 103 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/14:0H-events_highpri] root 104 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/15] root 105 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/15] root 106 0.0 0.0 0 0 ? S 15:23 0:02 [migration/15] root 107 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/15] root 109 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/15:0H-events_highpri] root 110 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/16] root 111 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/16] root 112 0.0 0.0 0 0 ? S 15:23 0:02 [migration/16] root 113 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/16] root 114 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/16:0-rcu_gp] root 115 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/16:0H-events_highpri] root 116 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/17] root 117 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/17] root 118 0.0 0.0 0 0 ? S 15:23 0:02 [migration/17] root 119 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/17] root 121 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/17:0H-events_highpri] root 122 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/18] root 123 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/18] root 124 0.0 0.0 0 0 ? S 15:23 0:02 [migration/18] root 125 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/18] root 127 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/18:0H-events_highpri] root 128 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/19] root 129 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/19] root 130 0.0 0.0 0 0 ? S 15:23 0:02 [migration/19] root 131 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/19] root 133 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/19:0H-events_highpri] root 134 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/20] root 135 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/20] root 136 0.0 0.0 0 0 ? S 15:23 0:02 [migration/20] root 137 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/20] root 138 0.0 0.0 0 0 ? I 15:23 0:00 [kworker/20:0-rcu_gp] root 139 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/20:0H-events_highpri] root 140 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/21] root 141 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/21] root 142 0.0 0.0 0 0 ? S 15:23 0:02 [migration/21] root 143 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/21] root 145 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/21:0H-events_highpri] root 146 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/22] root 147 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/22] root 148 0.0 0.0 0 0 ? S 15:23 0:02 [migration/22] root 149 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/22] root 151 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/22:0H-events_highpri] root 152 0.0 0.0 0 0 ? S 15:23 0:00 [cpuhp/23] root 153 0.0 0.0 0 0 ? S 15:23 0:00 [idle_inject/23] root 154 0.0 0.0 0 0 ? S 15:23 0:02 [migration/23] root 155 0.0 0.0 0 0 ? S 15:23 0:00 [ksoftirqd/23] root 157 0.0 0.0 0 0 ? I< 15:23 0:00 [kworker/23:0H-events_highpri] root 158 0.0 0.0 0 ... oidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:22:14.586636Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes". 
Create session OK 2025-09-25T16:22:14.586639Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:22:14.586640Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:22:14.586819Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-09-25T16:22:14.586826Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:14.586827Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:14.586866Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small". Create session OK 2025-09-25T16:22:14.586873Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:22:14.586875Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:22:14.587063Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries". Create session OK 2025-09-25T16:22:14.587071Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:14.587073Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:14.590087Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs". 
Create session OK 2025-09-25T16:22:14.590097Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:14.590099Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:14.598687Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)" 2025-09-25T16:22:14.598702Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create directory "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)": 2025-09-25T16:22:14.661910Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:155: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-09-25T16:22:14.661927Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:122: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-09-25T16:22:14.661936Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-09-25T16:22:14.661941Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-09-25T16:22:14.680968Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:22:14.681004Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-09-25T16:22:14.681007Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:22:14.681013Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/nodes": 2025-09-25T16:22:14.681160Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:14.681165Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:22:14.681169Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/mappings": 2025-09-25T16:22:14.681178Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/connections": 2025-09-25T16:22:14.681245Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:22:14.681253Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenants": 2025-09-25T16:22:14.681298Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:22:14.681303Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:22:14.681305Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-09-25T16:22:14.681308Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/quotas": 2025-09-25T16:22:14.681344Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:14.681352Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/compute_databases": 2025-09-25T16:22:14.681383Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:22:14.681391Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-09-25T16:22:14.681399Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:14.681401Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: 
Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/bindings": 2025-09-25T16:22:14.681458Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:14.681466Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/jobs": 2025-09-25T16:22:14.681568Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:14.681578Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageDescribeQuery::TTestCaseShouldCheckExist::Execute_(NUnitTest::TTestContext&)/queries": 2025-09-25T16:22:15.314898Z node 17 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "abra" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TYdbControlPlaneStoragePipeline::ShouldRetryQuery [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl >> TYdbControlPlaneStorageWriteResultData::ShouldValidateRead [GOOD] >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess |83.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> TFlatTableExecutor_LongTxAndBlobs::SmallValues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-09-25T16:22:16.323394Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:16.323435Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:16.328270Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-09-25T16:22:16.328311Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:16.353206Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:16.353342Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:138:2162], cookie=6257768118992724772, path="/foo/bar/baz") 2025-09-25T16:22:16.364483Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:138:2162], cookie=6257768118992724772, status=SUCCESS) 2025-09-25T16:22:16.364629Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:147:2169], cookie=4626479990443251050) 2025-09-25T16:22:16.377669Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:147:2169], cookie=4626479990443251050) 2025-09-25T16:22:16.377814Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:152:2174], cookie=5671311642831490639, path="/foo/bar/baz") 2025-09-25T16:22:16.388790Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:152:2174], cookie=5671311642831490639, status=SUCCESS) 2025-09-25T16:22:16.388944Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:157:2179], cookie=16698485550921406009) 2025-09-25T16:22:16.399937Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: 
[72057594037927937] TTxConfigGet::Complete (sender=[1:157:2179], cookie=16698485550921406009) 2025-09-25T16:22:16.403372Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:16.403411Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:16.403477Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-09-25T16:22:16.403632Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:16.449292Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:16.449444Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:198:2211], cookie=14772956137108484575) 2025-09-25T16:22:16.460558Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:198:2211], cookie=14772956137108484575) 2025-09-25T16:22:16.460743Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2218], cookie=13624886921224863847, path="/foo/bar/baz") 2025-09-25T16:22:16.471831Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2218], cookie=13624886921224863847, status=SUCCESS) 2025-09-25T16:22:16.471989Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:211:2223], cookie=10794975048751523845, path="/foo/bar/baz") 2025-09-25T16:22:16.472005Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:211:2223], cookie=10794975048751523845, status=PRECONDITION_FAILED) 2025-09-25T16:22:16.575839Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:16.575881Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:16.580393Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-09-25T16:22:16.580518Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:16.605735Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:16.605882Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:138:2162], cookie=5877775790375907400, name="Lock1") 2025-09-25T16:22:16.605906Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:138:2162], cookie=5877775790375907400) 2025-09-25T16:22:17.078740Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:17.078777Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:17.082565Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-09-25T16:22:17.082735Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:17.115384Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:17.115596Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2162], cookie=17569632372129557292, session=0, seqNo=0) 
2025-09-25T16:22:17.115643Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-09-25T16:22:17.126555Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2162], cookie=17569632372129557292, session=1) 2025-09-25T16:22:17.126675Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-09-25T16:22:17.126720Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-09-25T16:22:17.126732Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-09-25T16:22:17.138753Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:137:2162], cookie=111) 2025-09-25T16:22:17.138961Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:149:2171], cookie=7009661570864755896, name="Lock1", force=0) 2025-09-25T16:22:17.158326Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:149:2171], cookie=7009661570864755896) 2025-09-25T16:22:17.158523Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:154:2176], cookie=6708689305743058991, name="Sem1", force=0) 2025-09-25T16:22:17.171941Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:154:2176], cookie=6708689305743058991) 2025-09-25T16:22:17.172115Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:159:2181], cookie=7677981767754991239, name="Sem1", limit=42) 2025-09-25T16:22:17.172175Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-09-25T16:22:17.185783Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:159:2181], cookie=7677981767754991239) 2025-09-25T16:22:17.185929Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:164:2186], cookie=7049819239533601291, name="Sem1", force=0) 2025-09-25T16:22:17.185963Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-09-25T16:22:17.196993Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:164:2186], cookie=7049819239533601291) 2025-09-25T16:22:17.197165Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:169:2191], cookie=9197674397853754270, name="Sem1", force=0) 2025-09-25T16:22:17.209199Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:169:2191], cookie=9197674397853754270) 2025-09-25T16:22:17.564726Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:17.564767Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:17.570033Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] 
TTxInitSchema::Complete 2025-09-25T16:22:17.570128Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:17.602314Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:17.602508Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=5027305404541215132, session=0, seqNo=0) 2025-09-25T16:22:17.602554Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-09-25T16:22:17.613959Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=5027305404541215132, session=1) 2025-09-25T16:22:17.614083Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=15376420450816641658, session=0, seqNo=0) 2025-09-25T16:22:17.614136Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-09-25T16:22:17.625356Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=15376420450816641658, session=2) 2025-09-25T16:22:17.625474Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:137:2162], cookie=12748943064527630429 2025-09-25T16:22:17.625617Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:150:2172], cookie=14145334913478066880, name="Sem1", limit=3) 2025-09-25T16:22:17.625661Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-09-25T16:22:17.636779Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:150:2172], cookie=14145334913478066880) 2025-09-25T16:22:17.636912Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=112, name="Sem1") 2025-09-25T16:22:17.636939Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=112) 2025-09-25T16:22:17.636984Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=113, name="Sem1") 2025-09-25T16:22:17.636992Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=113) 2025-09-25T16:22:17.637024Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=16860683329636435599, session=2, seqNo=0) 2025-09-25T16:22:17.647965Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTx ... 
node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-09-25T16:22:19.241939Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-09-25T16:22:19.256582Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=129, session=1, semaphore="Sem2" count=2) 2025-09-25T16:22:19.273307Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=129) 2025-09-25T16:22:19.273443Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=130, name="Sem2") 2025-09-25T16:22:19.273472Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=130) 2025-09-25T16:22:19.273521Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=131, session=1, semaphore="Sem2" count=1) 2025-09-25T16:22:19.284542Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=131) 2025-09-25T16:22:19.284678Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=132, name="Sem2") 2025-09-25T16:22:19.284707Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=132) 2025-09-25T16:22:19.284747Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=133, name="Sem2") 2025-09-25T16:22:19.284753Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=133) 2025-09-25T16:22:19.543678Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-09-25T16:22:19.543718Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-09-25T16:22:19.557915Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-09-25T16:22:19.557986Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-09-25T16:22:19.593606Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-09-25T16:22:19.594985Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=7033944387447030723, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-09-25T16:22:19.595051Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2025-09-25T16:22:19.609555Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=7033944387447030723) 2025-09-25T16:22:19.609722Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=16010713006176350767, path="/Root1/Res", config={ }) 2025-09-25T16:22:19.609783Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2025-09-25T16:22:19.621650Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=16010713006176350767) 2025-09-25T16:22:19.621838Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:152:2174], cookie=5858828064638639350, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-09-25T16:22:19.621891Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2025-09-25T16:22:19.633660Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:152:2174], cookie=5858828064638639350) 2025-09-25T16:22:19.633857Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:157:2179], cookie=9236871120026996425, path="/Root2/Res", config={ }) 2025-09-25T16:22:19.633929Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-09-25T16:22:19.645617Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:157:2179], cookie=9236871120026996425) 2025-09-25T16:22:19.645812Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:162:2184], cookie=17738183986544762, path="/Root2/Res/Subres", config={ }) 2025-09-25T16:22:19.645885Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-09-25T16:22:19.657561Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:162:2184], cookie=17738183986544762) 2025-09-25T16:22:19.657964Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 12791077370448146244. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-09-25T16:22:19.657976Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=12791077370448146244) 2025-09-25T16:22:19.705072Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.767270Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.807208Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.807404Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:175:2193]. Cookie: 4652249333841876500. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-09-25T16:22:19.807547Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 12127641936795657795. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-09-25T16:22:19.807556Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=12127641936795657795) 2025-09-25T16:22:19.861025Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.914230Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.914432Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:184:2200]. Cookie: 15576823289060997627. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-09-25T16:22:19.914578Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 1945573544098033207. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-09-25T16:22:19.914587Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=1945573544098033207) 2025-09-25T16:22:19.914685Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 14375787306789561982. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-09-25T16:22:19.914692Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=14375787306789561982) 2025-09-25T16:22:19.953881Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.953929Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-09-25T16:22:19.954106Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:191:2207]. Cookie: 15196299739153080682. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMultipleDotsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower >> TYdbControlPlaneStoragePipeline::ShouldCheckAst [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TImportWithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Raw] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2025-09-25T16:20:47.728887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:105;event=initialize_shard;step=OnActivateExecutor; 
2025-09-25T16:20:47.733247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:124;event=initialize_shard;step=initialize_tiring_finished; 2025-09-25T16:20:47.733300Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-09-25T16:20:47.734037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-09-25T16:20:47.734081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-09-25T16:20:47.734111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-09-25T16:20:47.734125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-09-25T16:20:47.734138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-09-25T16:20:47.734156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-09-25T16:20:47.734169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-09-25T16:20:47.734183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-09-25T16:20:47.734196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-09-25T16:20:47.734208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.734222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-09-25T16:20:47.734235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-09-25T16:20:47.734268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-09-25T16:20:47.739272Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-09-25T16:20:47.739321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-09-25T16:20:47.739329Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-09-25T16:20:47.739360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.739394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-09-25T16:20:47.739405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-09-25T16:20:47.739409Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-09-25T16:20:47.739416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-09-25T16:20:47.739423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-09-25T16:20:47.739428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-09-25T16:20:47.739432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-09-25T16:20:47.739444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-09-25T16:20:47.739450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-09-25T16:20:47.739455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-09-25T16:20:47.739459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-09-25T16:20:47.739467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-09-25T16:20:47.739472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-09-25T16:20:47.739477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-09-25T16:20:47.739480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-09-25T16:20:47.739487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-09-25T16:20:47.739493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-09-25T16:20:47.739496Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-09-25T16:20:47.739503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-09-25T16:20:47.739509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-09-25T16:20:47.739512Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-09-25T16:20:47.739529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-09-25T16:20:47.739535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-09-25T16:20:47.739538Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-09-25T16:20:47.739548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-09-25T16:20:47.739554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.739558Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-09-25T16:20:47.739563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-09-25T16:20:47.739569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-09-25T16:20:47.739572Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-09-25T16:20:47.739578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-09-25T16:20:47.739584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-09-25T16:20:47.739587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-09-25T16:20:47.739597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-09-25T16:20:47.739603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=269;sum=46648;count=168;size_of_portion=184; 2025-09-25T16:22:19.213621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1451; 2025-09-25T16:22:19.213629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-09-25T16:22:19.213707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=72; 2025-09-25T16:22:19.213713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1564; 2025-09-25T16:22:19.213720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1580; 2025-09-25T16:22:19.213727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-09-25T16:22:19.213751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=17; 2025-09-25T16:22:19.213757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1702; 2025-09-25T16:22:19.213779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=15; 2025-09-25T16:22:19.213798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2025-09-25T16:22:19.213827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=23; 2025-09-25T16:22:19.213848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=15; 2025-09-25T16:22:19.214199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=344; 2025-09-25T16:22:19.214580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=374; 2025-09-25T16:22:19.214589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-09-25T16:22:19.214596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-09-25T16:22:19.214604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-09-25T16:22:19.214618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-09-25T16:22:19.214625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-09-25T16:22:19.214640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2025-09-25T16:22:19.214647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-09-25T16:22:19.214660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-09-25T16:22:19.214676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=7; 2025-09-25T16:22:19.214691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=9; 2025-09-25T16:22:19.214697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=6622; 2025-09-25T16:22:19.214729Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=5508872;raw_bytes=7830634;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=114929968;raw_bytes=184829450;count=5;records=1780000} inactive {blob_bytes=449763544;raw_bytes=695770898;count=36;records=6977500} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-09-25T16:22:19.214757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-09-25T16:22:19.214766Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-09-25T16:22:19.214781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=columnshard_impl.cpp:1528;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-09-25T16:22:19.214788Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];process=SwitchToWork;fline=column_engine_logs.cpp:516;event=OnTieringModified;new_count_tierings=0; 2025-09-25T16:22:19.214814Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:19.214835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:19.214844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:19.214859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815450423;tx_id=18446744073709551615;;current_snapshot_ts=1758817248778; 2025-09-25T16:22:19.214867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:19.214880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:19.214886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:19.214910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; 2025-09-25T16:22:19.215647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-09-25T16:22:19.216195Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:249;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-09-25T16:22:19.216208Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Send periodic stats. 2025-09-25T16:22:19.216215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Disabled periodic stats at tablet 9437184 2025-09-25T16:22:19.216221Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-09-25T16:22:19.216245Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:943;background=cleanup_schemas;skip_reason=no_changes; 2025-09-25T16:22:19.216265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:258;event=StartCleanup;portions_count=18; 2025-09-25T16:22:19.216278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:271;event=StartCleanupStop;snapshot=plan_step=1758815450423;tx_id=18446744073709551615;;current_snapshot_ts=1758817248778; 2025-09-25T16:22:19.216287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:334;event=StartCleanup;portions_count=18;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-09-25T16:22:19.216296Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:800;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:19.216302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:832;background=cleanup;skip_reason=no_changes; 2025-09-25T16:22:19.216319Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-09-25T16:22:19.216328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2409:4375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:755;background=ttl;skip_reason=no_changes; >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TYdbControlPlaneStorageCreateConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Raw] [GOOD] >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> 
PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-09-25T16:21:12.072906Z :WriteRAW INFO: Random seed for debugging is 1758817272072897 2025-09-25T16:21:12.513440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7554062663852912286:2261];send_to=[0:7307199536658146131:7762515]; 2025-09-25T16:21:12.513492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-09-25T16:21:12.517720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-09-25T16:21:12.517863Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/endf/002911/r3tmp/tmpK3kKJL/pdisk_1.dat 2025-09-25T16:21:12.521536Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-09-25T16:21:12.562114Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:21:12.562183Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-09-25T16:21:12.610950Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:21:12.612938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-09-25T16:21:12.620993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:21:12.621035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:21:12.621914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-09-25T16:21:12.621936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-09-25T16:21:12.627621Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-09-25T16:21:12.627693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-09-25T16:21:12.628065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7894, node 1 2025-09-25T16:21:12.681063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/endf/002911/r3tmp/yandexcV2pn2.tmp 2025-09-25T16:21:12.681079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/endf/002911/r3tmp/yandexcV2pn2.tmp 2025-09-25T16:21:12.681165Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/endf/002911/r3tmp/yandexcV2pn2.tmp 2025-09-25T16:21:12.681210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-09-25T16:21:12.682394Z INFO: TTestServer started on Port 18685 GrpcPort 7894 TClient is connected to server localhost:18685 PQClient connected to localhost:7894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-09-25T16:21:12.723150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-09-25T16:21:12.729219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-09-25T16:21:12.773483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-09-25T16:21:12.805335Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... waiting... 2025-09-25T16:21:12.901501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-09-25T16:21:13.027692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062670278526955:2293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:13.027726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:13.028017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062670278526983:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:13.028028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7554062670278526984:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:13.028048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-09-25T16:21:13.029652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-09-25T16:21:13.035712Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7554062670278526987:2299], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-09-25T16:21:13.131935Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7554062670278527015:2138] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-09-25T16:21:13.135622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:684) 2025-09-25T16:21:13.141201Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [1:7554062668147880363:2328], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:21:13.141807Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=1&id=NjhlMmRkNDAtMmI5ODM1OTQtNjY4MDFiNWYtMjU5ZGVjZTk=, ActorId: [1:7554062668147880321:2320], ActorState: ExecuteState, TraceId: 01k60tvh7hagadb2zkwp2t42hq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-09-25T16:21:13.141264Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:568: Compilation failed, self: [2:7554062670278527022:2303], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-09-25T16:21:13.141810Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2395: SessionId: ydb://session/3?node_id=2&id=Mjg0NzVkMC0xNTFjY2Y5Mi0yN2QxYWEyMi0zZTAwMWE5Yg==, ActorId: [2:7554062670278526953:2292], ActorState: ExecuteState, TraceId: 01k60tvh6222hm938vfqafd87v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-09-25T16:21:13.143452Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-09-25T16:21:13.143466Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list c ... Status=SUCCESS 2025-09-25T16:22:19.875969Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [15:7554062952601607171:2471] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-09-25T16:22:19.875973Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [15:7554062952601607171:2471] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-09-25T16:22:19.875981Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-09-25T16:22:19.876399Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-09-25T16:22:19.876343Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037892] server connected, pipe [15:7554062952601607230:2471], now have 1 active actors on pipe 2025-09-25T16:22:19.876582Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-09-25T16:22:19.876597Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2632: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-09-25T16:22:19.876638Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-09-25T16:22:19.876647Z node 16 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:22:19.876651Z node 16 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:22:19.876653Z node 16 :PERSQUEUE DEBUG: partition.cpp:2274: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-09-25T16:22:19.876664Z node 16 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:22:19.876678Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-09-25T16:22:19.876690Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-09-25T16:22:19.876894Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-09-25T16:22:19.876902Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2632: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-09-25T16:22:19.876920Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-09-25T16:22:19.877100Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 2025-09-25T16:22:19.877484Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1758817339877 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-09-25T16:22:19.877531Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|52420297-78b4a0c0-fc49478b-a76b6b4d_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-09-25T16:22:19.877626Z :INFO: [] MessageGroupId [src] SessionId [src|52420297-78b4a0c0-fc49478b-a76b6b4d_0] Write session: close. 
Timeout = 0 ms 2025-09-25T16:22:19.877631Z :INFO: [] MessageGroupId [src] SessionId [src|52420297-78b4a0c0-fc49478b-a76b6b4d_0] Write session will now close 2025-09-25T16:22:19.877636Z :DEBUG: [] MessageGroupId [src] SessionId [src|52420297-78b4a0c0-fc49478b-a76b6b4d_0] Write session: aborting 2025-09-25T16:22:19.877723Z :INFO: [] MessageGroupId [src] SessionId [src|52420297-78b4a0c0-fc49478b-a76b6b4d_0] Write session: gracefully shut down, all writes complete 2025-09-25T16:22:19.877727Z :DEBUG: [] MessageGroupId [src] SessionId [src|52420297-78b4a0c0-fc49478b-a76b6b4d_0] Write session: destroy 2025-09-25T16:22:19.877998Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 grpc read done: success: 0 data: 2025-09-25T16:22:19.878005Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 grpc read failed 2025-09-25T16:22:19.878011Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 grpc closed 2025-09-25T16:22:19.878016Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|52420297-78b4a0c0-fc49478b-a76b6b4d_0 is DEAD 2025-09-25T16:22:19.878227Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-09-25T16:22:19.878368Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2743: [PQ: 72075186224037892] server disconnected, pipe [15:7554062952601607230:2471] destroyed 2025-09-25T16:22:19.878390Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-09-25T16:22:19.878398Z node 16 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:22:19.878401Z node 16 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:22:19.878403Z node 16 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:22:19.894908Z :INFO: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Starting read session 2025-09-25T16:22:19.894931Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Starting cluster discovery 2025-09-25T16:22:19.894964Z :INFO: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21411: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21411
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21411. " 2025-09-25T16:22:19.894968Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Restart cluster discovery in 0.006390s 2025-09-25T16:22:19.902020Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Starting cluster discovery 2025-09-25T16:22:19.902104Z :INFO: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21411: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21411
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21411. " 2025-09-25T16:22:19.902109Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Restart cluster discovery in 0.010527s 2025-09-25T16:22:19.912937Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Starting cluster discovery 2025-09-25T16:22:19.913012Z :INFO: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21411: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21411
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21411. " 2025-09-25T16:22:19.913018Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Restart cluster discovery in 0.021648s 2025-09-25T16:22:19.935000Z :DEBUG: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Starting cluster discovery 2025-09-25T16:22:19.935087Z :NOTICE: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21411: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21411
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21411. " } 2025-09-25T16:22:19.935137Z :NOTICE: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21411: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21411
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:21411. " } 2025-09-25T16:22:19.935155Z :INFO: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Closing read session. Close timeout: 0.000000s 2025-09-25T16:22:19.935163Z :NOTICE: [/Root] [/Root] [fb78499c-db22f6fd-d5e9c17d-61bbbf3d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-09-25T16:22:19.977417Z node 16 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:22:19.977446Z node 16 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:22:19.977450Z node 16 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:22:20.081415Z node 16 :PERSQUEUE DEBUG: partition.cpp:2261: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-09-25T16:22:20.081441Z node 16 :PERSQUEUE DEBUG: partition.cpp:2270: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-09-25T16:22:20.081445Z node 16 :PERSQUEUE DEBUG: partition.cpp:2293: [72075186224037892][Partition][0][StateIdle] Try persist 2025-09-25T16:22:20.227901Z node 15 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1005: ActorId: [15:7554062956896574554:2483] TxId: 281474976715684. Ctx: { TraceId: 01k60txjpe1a90bpx97b0je5wr, Database: /Root, SessionId: ydb://session/3?node_id=15&id=ZWEwNWU2OWYtZDAyOTdhZWYtMWQ0MzhkMjYtYjg2MzhmOGQ=, PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 2025-09-25T16:22:20.228209Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1208: SelfId: [15:7554062956896574562:2483], TxId: 281474976715684, task: 3. Ctx: { TraceId : 01k60txjpe1a90bpx97b0je5wr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=15&id=ZWEwNWU2OWYtZDAyOTdhZWYtMWQ0MzhkMjYtYjg2MzhmOGQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7554062956896574554:2483], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckLimit [GOOD] >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability |83.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TYdbControlPlaneStorageListQueries::ShouldFilterByMe [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterType >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo >> TYdbControlPlaneStorageCreateBinding::ShouldCheckMaxCountBindings [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> 
TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> TYdbControlPlaneStorageDeleteBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease >> TYdbControlPlaneStorageModifyConnection::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection >> TYdbControlPlaneStoragePipeline::ShouldCheckNotAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> 
TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate >> TieredCache::Touch [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TSharedPageCache_Actor::Request_Basics >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast >> DBase::WideKey >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:116:2146] 2025-09-25T16:19:58.003012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7911: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-09-25T16:19:58.003035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7939: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:58.003041Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7825: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-09-25T16:19:58.003047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7841: OperationsProcessing config: using default configuration 2025-09-25T16:19:58.003053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-09-25T16:19:58.003058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-09-25T16:19:58.003067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7971: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-09-25T16:19:58.003081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-09-25T16:19:58.003312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-09-25T16:19:58.003379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-09-25T16:19:58.025278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8074: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-09-25T16:19:58.025317Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-09-25T16:19:58.025430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8042: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:179:2058] recipient: [1:15:2062] 2025-09-25T16:19:58.030277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-09-25T16:19:58.030374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-09-25T16:19:58.030414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-09-25T16:19:58.031992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-09-25T16:19:58.032084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-09-25T16:19:58.032203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.032408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-09-25T16:19:58.033583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:58.033638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-09-25T16:19:58.033920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-09-25T16:19:58.033935Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-09-25T16:19:58.033954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-09-25T16:19:58.033963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:19:58.033970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:205: TTxServerlessStorageBilling.Complete 2025-09-25T16:19:58.034010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7086: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:221:2058] recipient: [1:219:2219] Leader for TabletID 72057594037968897 is [1:225:2223] sender: [1:226:2058] recipient: [1:219:2219] 2025-09-25T16:19:58.035727Z node 1 :HIVE INFO: tablet_helpers.cpp:1126: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:246:2058] recipient: [1:15:2062] 2025-09-25T16:19:58.061512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-09-25T16:19:58.061592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.061657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-09-25T16:19:58.061667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5528: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-09-25T16:19:58.061728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-09-25T16:19:58.061773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-09-25T16:19:58.062487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.062533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-09-25T16:19:58.062596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.062608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-09-25T16:19:58.062614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-09-25T16:19:58.062620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-09-25T16:19:58.063077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.063091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-09-25T16:19:58.063101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-09-25T16:19:58.063523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.063537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-09-25T16:19:58.063543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-09-25T16:19:58.063550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-09-25T16:19:58.064297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-09-25T16:19:58.064746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-09-25T16:19:58.064804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:261:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-09-25T16:19:58.065060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-09-25T16:19:58.065089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tab ... 
7594046678944, cookie: 102 2025-09-25T16:22:22.033680Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-09-25T16:22:22.033686Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-09-25T16:22:22.033692Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-09-25T16:22:22.033700Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-09-25T16:22:22.033722Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27910 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E7F8A8B4-D8FB-46AC-9161-020C9333ECC2 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:22:22.034266Z node 139 :DATASHARD_RESTORE DEBUG: import_s3.cpp:527: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-09-25T16:22:22.034871Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-09-25T16:22:22.046294Z node 139 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-09-25T16:22:22.057444Z node 139 :DATASHARD_RESTORE DEBUG: import_s3.cpp:606: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } } 2025-09-25T16:22:22.057476Z node 139 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ChecksumState: DownloadState: } 2025-09-25T16:22:22.057500Z node 139 :DATASHARD_RESTORE DEBUG: import_s3.cpp:517: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27910 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D011317C-40D6-4C6D-815D-C47C6E8BC92C amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-143-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-09-25T16:22:22.061936Z node 139 :DATASHARD_RESTORE DEBUG: 
import_s3.cpp:656: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-09-25T16:22:22.061963Z node 139 :DATASHARD_RESTORE TRACE: import_s3.cpp:673: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-09-25T16:22:22.062017Z node 139 :DATASHARD_RESTORE INFO: import_s3.cpp:806: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-09-25T16:22:22.073345Z node 139 :DATASHARD_RESTORE DEBUG: import_s3.cpp:814: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } } 2025-09-25T16:22:22.073374Z node 139 :DATASHARD_RESTORE NOTICE: import_s3.cpp:621: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ChecksumState: DownloadState: } 2025-09-25T16:22:22.073385Z node 139 :DATASHARD_RESTORE NOTICE: import_s3.cpp:962: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-09-25T16:22:22.098048Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 102 TxId: 103 2025-09-25T16:22:22.098085Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 103:0, target opId# 102:0 2025-09-25T16:22:22.099185Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-09-25T16:22:22.099291Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-09-25T16:22:22.099298Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-09-25T16:22:22.099313Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-09-25T16:22:22.099315Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-09-25T16:22:22.099369Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5901: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 597000456443 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:22:22.099377Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-09-25T16:22:22.099397Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 597000456443 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:22:22.099409Z node 139 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 597000456443 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-09-25T16:22:22.099423Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-09-25T16:22:22.099427Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:22:22.099435Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-09-25T16:22:22.099441Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-09-25T16:22:22.099486Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-09-25T16:22:22.099616Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:22:22.099623Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-09-25T16:22:22.099633Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:22:22.099636Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:22:22.099640Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-09-25T16:22:22.099642Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:22:22.099645Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-09-25T16:22:22.099649Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-09-25T16:22:22.099653Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-09-25T16:22:22.099656Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: RemoveTx for txid 102:0 2025-09-25T16:22:22.099680Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-09-25T16:22:22.103943Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-09-25T16:22:22.104042Z node 139 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-09-25T16:22:22.105071Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-09-25T16:22:22.105091Z node 139 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 102:0 2025-09-25T16:22:22.105391Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-09-25T16:22:22.105405Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [139:435:2403] 2025-09-25T16:22:22.105452Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-09-25T16:22:22.105457Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [139:435:2403] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> DBase::WideKey [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> DBase::VersionPureMem >> TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset [GOOD] >> TSharedPageCache_Actor::Request_Superset_Reversed >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TYdbControlPlaneStoragePipeline::ShouldCheckAstClear [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> 
TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TScreen::Sequential [GOOD] >> TScreen::Random >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached >> DBase::VersionCompactedMem [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckNotExistOldName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope >> DBase::VersionCompactedParts >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> DBase::VersionCompactedParts [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck [GOOD] >> Memtable::Erased >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> TYdbControlPlaneStorageModifyConnection::ShouldCheckMoveToScope [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics 
[GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> TYdbControlPlaneStorageCreateBinding::ShouldCheckIdempotencyKey [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-09-25T16:22:20.968648Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.006 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.006 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxInitSchema 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 270b annex 0, ~{ } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxWriteManyDeltas} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxWriteManyDeltas 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxWriteManyDeltas} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxWriteManyDeltas} hope 1 -> done Change{2, redo 54925b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxWriteManyDeltas} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ...compacting 00000.009 DD| TABLET_EXECUTOR: TCompactionLogic 
PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.009 II| TABLET_EXECUTOR: Leader{1:2:4} starting compaction 00000.009 II| TABLET_EXECUTOR: Leader{1:2:5} starting Scan{1 on 101, Compact{1.2.4, eph 1}} 00000.009 II| TABLET_EXECUTOR: Leader{1:2:5} started compaction 1 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.011 DD| OPS_COMPACT: Compact{1.2.4, eph 1} saving [1:2:4:1:69632:81040:0] left 81040b 00000.011 DD| OPS_COMPACT: Compact{1.2.4, eph 1} saving [1:2:4:1:12288:214:0] left 81254b 00000.011 DD| OPS_COMPACT: Compact{1.2.4, eph 1} put [1:2:4:1:69632:81040:0] result OK flags { Valid } left 214b 00000.011 DD| OPS_COMPACT: Compact{1.2.4, eph 1} put [1:2:4:1:12288:214:0] result OK flags { Valid } left 0b 00000.011 II| OPS_COMPACT: Compact{1.2.4, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (81040 0 0)b }, ecr=1.000 00000.011 II| TABLET_EXECUTOR: Leader{1:2:5} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 4, product {1 parts epoch 2} done 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 4 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 3 for step 5 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} switch applied on followers, step 5 ...waiting until compacted 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxDisableBlobs} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxDisableBlobs 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxDisableBlobs} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxDisableBlobs} hope 1 -> done Change{4, redo 64b alter 21b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxDisableBlobs} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxCommitManyDeltas} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxCommitManyDeltas 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxCommitManyDeltas} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxCommitManyDeltas} hope 1 -> done Change{5, redo 40032b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTxAndBlobs::TTxCommitManyDeltas} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...compacting 00000.013 DD| TABLET_EXECUTOR: 
TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.013 II| TABLET_EXECUTOR: Leader{1:2:8} starting compaction 00000.013 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{3 on 101, Compact{1.2.8, eph 2}} 00000.013 II| TABLET_EXECUTOR: Leader{1:2:9} started compaction 3 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 2} saving [1:2:8:1:69632:81293:0] left 81293b 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 2} saving [1:2:8:1:12288:2246:0] left 83539b 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 2} saving [1:2:8:1:69634:24024:0] left 107563b 00000.016 DD| OPS_COMPACT: Compact{1.2.8, eph 2} put [1:2:8:1:69632:81293:0] result OK flags { Valid } left 26270b 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 8 00000.017 DD| OPS_COMPACT: Compact{1.2.8, eph 2} put [1:2:8:1:12288:2246:0] result OK flags { Valid } left 24024b 00000.017 DD| OPS_COMPACT: Compact{1.2.8, eph 2} put [1:2:8:1:69634:24024:0] result OK flags { Valid } left 0b 00000.017 II| OPS_COMPACT: Compact{1.2.8, eph 2} end=Done, 3 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (77762 0 0)b }, ecr=1.000 TxStatus{ [1:2:8:1:69634:24024:0] } 00000.017 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 8, product {tx status + 1 parts epoch 3} done 00000.017 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 3 for step 9 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} switch applied on followers, step 9 ...waiting until compacted 00000.018 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.018 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 107854b +(5, 106204b), 9 trc, -106204b acc} 00000.018 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 56917b} miss {0 0b} 00000.018 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.018 II| FAKE_ENV: DS.0 gone, left {887b, 9}, put {907b, 10} 00000.018 II| FAKE_ENV: DS.1 gone, left {214331b, 13}, put {214331b, 13} 00000.018 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: All BS storage groups are stopped 00000.018 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.018 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 73}, stopped 00000.000 II| FAKE_ENV: Born at 2025-09-25T16:22:20.989079Z 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {62b, 2} 00000.004 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, 
put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-09-25T16:22:21.002906Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {62b, 2} 00000.003 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-09-25T16:22:21.012985Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.002 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.002 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.002 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {62b, 2} 00000.002 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.002 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.0 ... 
:96:0], [1:2:33:1:24576:101:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:97:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:99:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:97:0], [1:2:74:1:24576:97:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:101:0], [1:2:78:1:24576:102:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:104:0], [1:2:82:1:24576:104:0], [1:2:83:1:24576:103:0], [1:2:84:1:24576:101:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:104:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:101:0], [1:2:93:1:24576:104:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:98:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:100:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:101:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:143:1:24576:104:0], [1:2:144:1:24576:104:0], [1:2:147:1:24576:60:0] } 00000.013 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:145:1:12288:412:0] 00000.013 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:145:1:12288:412:0] owner [60:214:2239] 00000.013 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:145:1:12288:412:0] owner [60:214:2239] cookie 4 class Online from cache [ ] already requested [ ] to request [ 2 3 8 9 ] 00000.013 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 509B EvictedInMemoryBytes: 0B 00000.013 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:145:1:12288:412:0] status OK pages [ 2 3 8 9 ] 00000.013 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:145:1:12288:412:0] owner [60:214:2239] class Online pages [ 2 3 8 9 ] cookie 4 00000.013 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 
957B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.013 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.013 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1950b, wait} done, Waste{2:0, 141968b +(142, 14142b), 147 trc} 00000.013 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:145:1:12288:412:0] owner [60:214:2239] cache mode Regular 00000.013 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 957B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.013 TT| TABLET_SAUSAGECACHE: Sync page collection [1:2:145:1:12288:412:0] owner [60:214:2239] pages [ 8 9 2 3 ] 00000.013 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 957B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:145:1:12290:524:0] owner [60:214:2239] cache mode TryKeepInMemory 00000.014 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:145:1:12290:524:0] 00000.014 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:145:1:12290:524:0] owner [60:214:2239] 00000.014 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:145:1:12290:524:0] to TryKeepInMemory 00000.014 TT| TABLET_SAUSAGECACHE: Try move collection [1:2:145:1:12290:524:0] in memory, total pages: 20 (134KiB), pages already loaded: 0 , pages to request: 20 (132KiB), pages out of memory limit: 0 00000.014 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:145:1:12290:524:0] owner [60:214:2239] class Scan pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ] 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 957B Passive: 0B LoadInFly: 132KiB EvictedInMemoryBytes: 0B 00000.014 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:145:1:12290:524:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ] 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 135KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{20 pages [1:2:145:1:12290:524:0] ok OK}, type 5 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 1 -> retry Change{146, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} pin 0 (0 b) load 1 (2120 b) 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.014 D3| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} request page collection [1:2:145:1:12288:412:0] pages [ 1 ] 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} postponed, loading 1 pages, 2120 bytes, newly pinned 0 pages, 0 bytes 
00000.014 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:145:1:12288:412:0] owner [60:214:2239] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 135KiB Passive: 0B LoadInFly: 2.07KiB EvictedInMemoryBytes: 0B 00000.014 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:145:1:12288:412:0] status OK pages [ 1 ] 00000.014 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:145:1:12288:412:0] owner [60:214:2239] class Online pages [ 1 ] cookie 1 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 137KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:145:1:12288:412:0] ok OK}, type 1 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} activated 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 2 -> retry Change{146, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} pin 0 (0 b) load 1 (2260 b) 00000.014 D3| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} request page collection [1:2:145:1:12288:412:0] pages [ 0 ] 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} postponed, loading 1 pages, 2260 bytes, newly pinned 0 pages, 0 bytes 00000.014 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:145:1:12288:412:0] owner [60:214:2239] cookie 1 class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 137KiB Passive: 0B LoadInFly: 2.21KiB EvictedInMemoryBytes: 0B 00000.014 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:145:1:12288:412:0] status OK pages [ 0 ] 00000.014 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:145:1:12288:412:0] owner [60:214:2239] class Online pages [ 0 ] cookie 1 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:145:1:12288:412:0] ok OK}, type 1 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} activated 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 3 -> done Change{146, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} release 8388608b of static, Memory{0 dyn 0} ... waiting for NActors::TEvents::TEvWakeup 00000.014 II| TABLET_SAUSAGECACHE: Wakeup DoGCManual 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NActors::TEvents::TEvWakeup (done) 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.014 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141968b +(0, 0b), 1 trc, -14142b acc} 00000.014 DD| TABLET_SAUSAGECACHE: Unregister owner [60:214:2239] 00000.014 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:145:1:12288:412:0] owner [60:214:2239] 00000.014 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:145:1:12290:524:0] owner [60:214:2239] 00000.014 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:145:1:12290:524:0] to Regular 00000.014 DD| TABLET_SAUSAGECACHE: Remove owner [60:214:2239] 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {0 0b} miss {6 4889b} 00000.014 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.014 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14598b, 150} 00000.014 II| FAKE_ENV: DS.1 gone, left {143918b, 10}, put {158217b, 154} 00000.014 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 861}, stopped >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::S3FIFO >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TSharedPageCache_Transactions::Compaction >> TYdbControlPlaneStorageDeleteConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission >> TSharedPageCache_Transactions::Compaction [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum [GOOD] >> Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables [GOOD] >> Vacuum::StartVacuumWithFollowers [GOOD] >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> Vacuum::StartVacuumEmptyTable [GOOD] >> Vacuum::StartVacuumWithRestarts [GOOD] >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors >> TYdbControlPlaneStorageListQueries::ShouldFilterType [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterMode >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> TopicDescribe::BasicStartOffset ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/control_plane_storage/unittest >> TYdbControlPlaneStorageWriteResultData::ShouldSuccess [GOOD] Test command err: Netstat: sh: 1: netstat: not found Process stat: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND root 1 0.0 0.0 167280 12396 ? Ss 15:23 0:02 /sbin/init root 2 0.0 0.0 0 0 ? S 15:23 0:00 [kthreadd] root 3 0.0 0.0 0 0 ? I< 15:23 0:00 [rcu_gp] root 4 0.0 0.0 0 0 ? 
[ps snapshot, truncated: kernel threads only (kworker, ksoftirqd, migration, cpuhp, idle_inject, rcu, ...), pids 5-159 covering CPUs 0-24, started 15:23, nearly all at 0.0% CPU and 0 RSS]
... TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:21.117134Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:21.117333Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings". Create session OK 2025-09-25T16:22:21.117340Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:22:21.117341Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:22:21.117413Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys". Create session OK 2025-09-25T16:22:21.117415Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:22:21.117416Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:22:21.117473Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs". Create session OK 2025-09-25T16:22:21.117475Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:21.117476Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:21.117532Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings".
Create session OK 2025-09-25T16:22:21.117534Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:21.117535Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:21.117596Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants". Create session OK 2025-09-25T16:22:21.117597Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:22:21.117598Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:22:21.117664Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:293: Create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases". Create session OK 2025-09-25T16:22:21.117665Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:21.117666Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:354: Call create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:21.170560Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:155: Successfully created coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha" 2025-09-25T16:22:21.170576Z node 17 :YQ_RATE_LIMITER DEBUG: schema.cpp:122: Reply for create coordination node "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)_rate_limiter/alpha": 2025-09-25T16:22:21.201338Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets" 2025-09-25T16:22:21.201358Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/result_sets": 2025-09-25T16:22:21.201682Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small" 2025-09-25T16:22:21.201688Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/pending_small": 2025-09-25T16:22:21.201718Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes" 2025-09-25T16:22:21.201721Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/nodes": 2025-09-25T16:22:21.201830Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks" 2025-09-25T16:22:21.201832Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenant_acks": 2025-09-25T16:22:21.201914Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries" 2025-09-25T16:22:21.201915Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/queries": 2025-09-25T16:22:21.201951Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas" 2025-09-25T16:22:21.201954Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/quotas": 2025-09-25T16:22:21.202008Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections" 2025-09-25T16:22:21.202010Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/connections": 2025-09-25T16:22:21.224502Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings" 2025-09-25T16:22:21.224520Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/mappings": 2025-09-25T16:22:21.224808Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings" 2025-09-25T16:22:21.224813Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table 
"local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/bindings": 2025-09-25T16:22:21.224950Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants" 2025-09-25T16:22:21.224957Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/tenants": 2025-09-25T16:22:21.225671Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys" 2025-09-25T16:22:21.225681Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/idempotency_keys": 2025-09-25T16:22:21.225835Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs" 2025-09-25T16:22:21.225839Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/jobs": 2025-09-25T16:22:21.225947Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:155: Successfully created table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases" 2025-09-25T16:22:21.225949Z node 17 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:122: Reply for create table "local/virtualvoidNFq::NTestSuiteTYdbControlPlaneStorageWriteResultData::TTestCaseShouldSuccess::Execute_(NUnitTest::TTestContext&)/compute_databases": >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> KqpQuery::UdfTerminate >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> TVersions::WreckHead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> Redo::ABI_008 [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 
RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | 
ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, 
+2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 
DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 
50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 
2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} 
+ Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900}
------- [LD] {default-linux-x86_64, relwithdebinfo, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
command (pid: 495549) /home/runner/.ya/tools/v4/9095799934/bin/python3 /home/runner/actions_runner/_work/ydb/ydb/build/scripts/link_exe.py --start-plugins --end-plugins --clang-ver 20 --source-root /home/runner/actions_runner/_work/ydb/ydb --build-root /home/runner/.ya/build/build_root/endf/0039dc --arch=LINUX --objcopy-exe /home/runner/.ya/tools/v4/9767151549/bin/llvm-objcopy /home/runner/.ya/tools/v4/9767151549/bin/clang++ -Wl,--whole-archive @/home/runner/.ya/build/build_root/endf/0039dc/ya_command_file_0.args -Wl,--no-whole-archive /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/__vcs_version__.c.o /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp.o /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp.o /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp.o /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp.o -o /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build --target=x86_64-linux-gnu --sysroot=/home/runner/.ya/tools/v4/243881345 -B/home/runner/.ya/tools/v4/243881345/usr/bin -Wl,--start-group contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a contrib/libs/libunwind/libcontrib-libs-libunwind.a contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a util/charset/libutil-charset.a contrib/libs/zlib/libcontrib-libs-zlib.a contrib/libs/double-conversion/libcontrib-libs-double-conversion.a contrib/libs/libc_compat/libcontrib-libs-libc_compat.a util/libyutil.a build/cow/on/libbuild-cow-on.a library/cpp/malloc/api/libcpp-malloc-api.a contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a library/cpp/colorizer/liblibrary-cpp-colorizer.a library/cpp/dbg_output/liblibrary-cpp-dbg_output.a library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a library/cpp/lcs/liblibrary-cpp-lcs.a library/cpp/containers/stack_array/libcpp-containers-stack_array.a library/cpp/diff/liblibrary-cpp-diff.a library/cpp/json/common/libcpp-json-common.a library/cpp/json/fast_sax/libcpp-json-fast_sax.a tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a library/cpp/json/writer/libcpp-json-writer.a library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a library/cpp/json/liblibrary-cpp-json.a library/cpp/testing/common/libcpp-testing-common.a library/cpp/testing/hook/libcpp-testing-hook.a library/cpp/testing/unittest/libcpp-testing-unittest.a
library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a contrib/libs/protobuf/third_party/utf8_range/libprotobuf-third_party-utf8_range.a contrib/libs/protobuf/libcontrib-libs-protobuf.a library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a library/cpp/html/pcdata/libcpp-html-pcdata.a library/cpp/containers/2d_array/libcpp-containers-2d_array.a library/cpp/binsaver/liblibrary-cpp-binsaver.a library/cpp/protobuf/util/proto/libprotobuf-util-proto.a contrib/libs/base64/avx2/liblibs-base64-avx2.a contrib/libs/base64/ssse3/liblibs-base64-ssse3.a contrib/libs/base64/neon32/liblibs-base64-neon32.a contrib/libs/base64/neon64/liblibs-base64-neon64.a contrib/libs/base64/plain32/liblibs-base64-plain32.a contrib/libs/base64/plain64/liblibs-base64-plain64.a library/cpp/string_utils/base64/libcpp-string_utils-base64.a library/cpp/protobuf/util/libcpp-protobuf-util.a library/cpp/protobuf/json/proto/libprotobuf-json-proto.a library/cpp/protobuf/json/libcpp-protobuf-json.a library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a ydb/library/actors/util/liblibrary-actors-util.a library/cpp/charset/lite/libcpp-charset-lite.a library/cpp/containers/str_map/libcpp-containers-str_map.a library/cpp/containers/atomizer/libcpp-containers-atomizer.a ydb/library/actors/prof/liblibrary-actors-prof.a ydb/library/actors/actor_type/liblibrary-actors-actor_type.a ydb/library/actors/protos/liblibrary-actors-protos.a library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a contrib/libs/xxhash/libcontrib-libs-xxhash.a contrib/libs/zstd/libcontrib-libs-zstd.a library/cpp/resource/liblibrary-cpp-resource.a contrib/libs/c-ares/libcontrib-libs-c-ares.a contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a contrib/libs/openssl/libcontrib-libs-openssl.a contrib/libs/re2/libcontrib-libs-re2.a contrib/libs/grpc/libcontrib-libs-grpc.a ydb/library/services/libydb-library-services.a library/cpp/logger/liblibrary-cpp-logger.a library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a library/cpp/lwtrace/liblibrary-cpp-lwtrace.a library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a library/cpp/monlib/metrics/libcpp-monlib-metrics.a library/cpp/monlib/encode/libcpp-monlib-encode.a library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a library/cpp/monlib/exception/libcpp-monlib-exception.a library/cpp/monlib/encode/json/libmonlib-encode-json.a contrib/libs/lz4/libcontrib-libs-lz4.a library/cpp/monlib/encode/spack/libmonlib-encode-spack.a library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a library/cpp/build_info/liblibrary-cpp-build_info.a library/cpp/svnversion/liblibrary-cpp-svnversion.a library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a library/cpp/coroutine/engine/libcpp-coroutine-engine.a library/cpp/coroutine/listener/libcpp-coroutine-listener.a contrib/libs/libiconv/static/liblibs-libiconv-static.a library/cpp/charset/liblibrary-cpp-charset.a contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a library/cpp/digest/md5/libcpp-digest-md5.a library/cpp/digest/murmur/libcpp-digest-murmur.a library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a library/cpp/iterator/liblibrary-cpp-iterator.a library/cpp/string_utils/quote/libcpp-string_utils-quote.a 
library/cpp/string_utils/scan/libcpp-string_utils-scan.a library/cpp/cgiparam/liblibrary-cpp-cgiparam.a library/cpp/digest/lower_case/libcpp-digest-lower_case.a library/cpp/http/misc/libcpp-http-misc.a library/cpp/mime/types/libcpp-mime-types.a contrib/libs/libidn/static/liblibs-libidn-static.a library/cpp/uri/liblibrary-cpp-uri.a library/cpp/http/fetch/libcpp-http-fetch.a contrib/libs/brotli/c/common/libbrotli-c-common.a contrib/libs/brotli/c/dec/libbrotli-c-dec.a contrib/libs/brotli/c/enc/libbrotli-c-enc.a contrib/libs/libbz2/libcontrib-libs-libbz2.a contrib/libs/fastlz/libcontrib-libs-fastlz.a contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a contrib/libs/snappy/libcontrib-libs-snappy.a library/cpp/streams/brotli/libcpp-streams-brotli.a library/cpp/streams/bzip2/libcpp-streams-bzip2.a library/cpp/streams/lzma/libcpp-streams-lzma.a library/cpp/http/io/libcpp-http-io.a library/cpp/threading/equeue/libcpp-threading-equeue.a library/cpp/http/server/libcpp-http-server.a library/cpp/monlib/service/libcpp-monlib-service.a library/cpp/monlib/encode/text/libmonlib-encode-text.a library/cpp/monlib/service/pages/libmonlib-service-pages.a library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a library/cpp/time_provider/liblibrary-cpp-time_provider.a ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a library/cpp/threading/queue/libcpp-threading-queue.a contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a ydb/library/actors/memory_log/liblibrary-actors-memory_log.a library/cpp/execprofile/liblibrary-cpp-execprofile.a library/cpp/threading/future/libcpp-threading-future.a ydb/library/actors/core/liblibrary-actors-core.a ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a contrib/libs/crcutil/libcontrib-libs-crcutil.a library/cpp/digest/crc32c/libcpp-digest-crc32c.a library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a library/cpp/packedtypes/liblibrary-cpp-packedtypes.a library/cpp/sliding_window/liblibrary-cpp-sliding_window.a ydb/library/actors/helpers/liblibrary-actors-helpers.a ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a ydb/library/actors/wilson/liblibrary-actors-wilson.a ydb/library/actors/interconnect/liblibrary-actors-interconnect.a ydb/library/aclib/protos/liblibrary-aclib-protos.a ydb/library/aclib/libydb-library-aclib.a library/cpp/sse/liblibrary-cpp-sse.a library/cpp/dot_product/liblibrary-cpp-dot_product.a library/cpp/l2_distance/liblibrary-cpp-l2_distance.a library/cpp/random_provider/liblibrary-cpp-random_provider.a ydb/core/config/protos/libcore-config-protos.a ydb/library/folder_service/proto/liblibrary-folder_service-proto.a ydb/public/api/protos/annotations/libapi-protos-annotations.a ydb/public/api/protos/libapi-protos.a yql/essentials/public/issue/protos/libpublic-issue-protos.a yql/essentials/core/issue/protos/libcore-issue-protos.a ydb/library/yql/dq/proto/libyql-dq-proto.a yql/essentials/public/types/libessentials-public-types.a ydb/library/yql/dq/actors/protos/libdq-actors-protos.a yql/essentials/protos/libyql-essentials-protos.a 
yql/essentials/providers/common/proto/libproviders-common-proto.a ydb/core/fq/libs/config/protos/liblibs-config-protos.a ydb/core/protos/nbs/libcore-protos-nbs.a ydb/core/protos/schemeshard/libcore-protos-schemeshard.a ydb/core/scheme/protos/libcore-scheme-protos.a ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a ydb/library/login/protos/liblibrary-login-protos.a ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a ydb/core/protos/libydb-core-protos.a ydb/core/audit/audit_config/libcore-audit-audit_config.a ydb/core/base/generated/libcore-base-generated.a ydb/core/control/lib/base/libcontrol-lib-base.a library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a ydb/core/control/lib/generated/libcontrol-lib-generated.a ydb/core/control/lib/libcore-control-lib.a ydb/core/debug/libydb-core-debug.a library/cpp/digest/old_crc/libcpp-digest-old_crc.a ydb/core/erasure/libydb-core-erasure.a ydb/core/graph/protos/libcore-graph-protos.a ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a ydb/core/protos/out/libcore-protos-out.a library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a library/cpp/digest/argonish/libcpp-digest-argonish.a ydb/library/login/password_checker/liblibrary-login-password_checker.a ydb/library/login/account_lockout/liblibrary-login-account_lockout.a ydb/library/login/cache/liblibrary-login-cache.a ydb/library/login/libydb-library-login.a contrib/libs/libaio/static/liblibs-libaio-static.a contrib/libs/liburing/libcontrib-libs-liburing.a ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a ydb/library/pdisk_io/libydb-library-pdisk_io.a ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a yql/essentials/utils/libyql-essentials-utils.a yql/essentials/public/issue/libessentials-public-issue.a ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a ydb/library/ydb_issue/libydb-library-ydb_issue.a ydb/public/api/protos/out/libapi-protos-out.a contrib/libs/apache/orc-format/liblibs-apache-orc-format.a contrib/libs/apache/orc/liblibs-apache-orc.a contrib/libs/utf8proc/libcontrib-libs-utf8proc.a contrib/libs/libevent/event_core/liblibs-libevent-event_core.a contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a contrib/restricted/boost/container/librestricted-boost-container.a contrib/libs/icu/libcontrib-libs-icu.a contrib/restricted/boost/atomic/librestricted-boost-atomic.a contrib/restricted/boost/chrono/librestricted-boost-chrono.a contrib/restricted/boost/exception/librestricted-boost-exception.a contrib/restricted/boost/regex/librestricted-boost-regex.a contrib/restricted/boost/thread/librestricted-boost-thread.a 
contrib/restricted/boost/locale/librestricted-boost-locale.a contrib/restricted/boost/random/librestricted-boost-random.a contrib/restricted/thrift/libcontrib-restricted-thrift.a contrib/restricted/uriparser/libcontrib-restricted-uriparser.a contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a contrib/libs/apache/arrow/liblibs-apache-arrow.a contrib/libs/cctz/libcontrib-libs-cctz.a library/cpp/enumbitset/liblibrary-cpp-enumbitset.a library/cpp/type_info/tz/libcpp-type_info-tz.a library/cpp/yt/assert/libcpp-yt-assert.a library/cpp/yt/exception/libcpp-yt-exception.a library/cpp/yt/misc/libcpp-yt-misc.a library/cpp/yt/malloc/libcpp-yt-malloc.a library/cpp/yt/string/libcpp-yt-string.a library/cpp/yt/system/libcpp-yt-system.a library/cpp/yt/memory/libcpp-yt-memory.a library/cpp/yt/yson_string/libcpp-yt-yson_string.a library/cpp/yt/yson/libcpp-yt-yson.a library/cpp/yson/liblibrary-cpp-yson.a yql/essentials/core/pg_settings/libessentials-core-pg_settings.a yql/essentials/core/issue/libessentials-core-issue.a yql/essentials/core/sql_types/libessentials-core-sql_types.a library/cpp/yson_pull/libyson_pull.a yql/essentials/public/decimal/libessentials-public-decimal.a yql/essentials/public/udf/libessentials-public-udf.a yql/essentials/minikql/dom/libessentials-minikql-dom.a yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a library/cpp/logger/global/libcpp-logger-global.a yql/essentials/utils/log/proto/libutils-log-proto.a contrib/libs/backtrace/libcontrib-libs-backtrace.a yql/essentials/utils/backtrace/libessentials-utils-backtrace.a yql/essentials/utils/log/libessentials-utils-log.a yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a util/draft/libutil-draft.a library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a library/cpp/yson/json/libcpp-yson-json.a library/cpp/yson/node/libcpp-yson-node.a library/cpp/openssl/holders/libcpp-openssl-holders.a library/cpp/openssl/method/libcpp-openssl-method.a library/cpp/openssl/io/libcpp-openssl-io.a library/cpp/retry/protos/libcpp-retry-protos.a library/cpp/retry/liblibrary-cpp-retry.a yql/essentials/utils/fetch/libessentials-utils-fetch.a yql/essentials/core/credentials/libessentials-core-credentials.a yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a yql/essentials/ast/libyql-essentials-ast.a yql/essentials/public/udf/arrow/libpublic-udf-arrow.a yql/essentials/core/cbo/libessentials-core-cbo.a library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a yql/essentials/public/langver/libessentials-public-langver.a contrib/libs/simdjson/libcontrib-libs-simdjson.a yql/essentials/types/binary_json/libessentials-types-binary_json.a yql/essentials/types/dynumber/libessentials-types-dynumber.a yql/essentials/types/uuid/libessentials-types-uuid.a yql/essentials/minikql/libyql-essentials-minikql.a library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a ydb/core/base/libydb-core-base.a library/cpp/getopt/small/libcpp-getopt-small.a ydb/library/global_plugins/libydb-library-global_plugins.a ydb/core/viewer/protos/libcore-viewer-protos.a ydb/core/viewer/json/libcore-viewer-json.a ydb/core/driver_lib/version/libversion.a library/cpp/string_utils/url/libcpp-string_utils-url.a library/cpp/cache/liblibrary-cpp-cache.a 
library/cpp/threading/cancellation/libcpp-threading-cancellation.a library/cpp/http/simple/libcpp-http-simple.a contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a ydb/public/api/client/yc_public/common/libclient-yc_public-common.a ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a ydb/public/api/grpc/libapi-grpc.a ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a ydb/library/security/libydb-library-security.a ydb/library/protobuf_printer/libydb-library-protobuf_printer.a ydb/library/grpc/server/liblibrary-grpc-server.a ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a ydb/core/graph/service/libcore-graph-service.a ydb/core/sys_view/service/libcore-sys_view-service.a ydb/core/grpc_services/counters/libcore-grpc_services-counters.a ydb/core/grpc_streaming/libydb-core-grpc_streaming.a library/cpp/dns/liblibrary-cpp-dns.a ydb/library/actors/http/liblibrary-actors-http.a ydb/core/audit/libydb-core-audit.a ydb/core/mon/audit/libcore-mon-audit.a ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a ydb/core/mon/libydb-core-mon.a ydb/core/blobstorage/base/libcore-blobstorage-base.a contrib/libs/t1ha/libcontrib-libs-t1ha.a ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a library/cpp/html/escape/libcpp-html-escape.a library/cpp/int128/liblibrary-cpp-int128.a library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a library/cpp/ipmath/liblibrary-cpp-ipmath.a contrib/libs/libssh2/libcontrib-libs-libssh2.a contrib/libs/nghttp2/libcontrib-libs-nghttp2.a contrib/libs/nghttp3/libcontrib-libs-nghttp3.a contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a contrib/libs/curl/libcontrib-libs-curl.a contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a contrib/restricted/aws/s2n/librestricted-aws-s2n.a contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a 
contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a ydb/core/util/libydb-core-util.a ydb/core/actorlib_impl/libydb-core-actorlib_impl.a ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a ydb/library/schlab/probes/liblibrary-schlab-probes.a ydb/library/schlab/schine/liblibrary-schlab-schine.a ydb/library/schlab/libydb-library-schlab.a ydb/library/schlab/protos/liblibrary-schlab-protos.a ydb/library/schlab/schoot/liblibrary-schlab-schoot.a ydb/library/schlab/schemu/liblibrary-schlab-schemu.a ydb/library/schlab/mon/liblibrary-schlab-mon.a ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a ydb/library/actors/async/liblibrary-actors-async.a contrib/libs/zstd06/libcontrib-libs-zstd06.a library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a ydb/core/graph/shard/protos/libgraph-shard-protos.a library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a ydb/core/mon_alloc/libydb-core-mon_alloc.a library/cpp/containers/bitseq/libcpp-containers-bitseq.a ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a ydb/core/scheme_types/libydb-core-scheme_types.a ydb/core/scheme/libydb-core-scheme.a ydb/core/tracing/libydb-core-tracing.a ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a ydb/core/tablet/libydb-core-tablet.a ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a ydb/core/tablet_flat/libydb-core-tablet_flat.a contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a ydb/core/wrappers/events/libcore-wrappers-events.a ydb/core/wrappers/libydb-core-wrappers.a ydb/core/blob_depot/libydb-core-blob_depot.a ydb/core/blockstore/core/libcore-blockstore-core.a yql/essentials/minikql/arrow/libessentials-minikql-arrow.a yql/essentials/minikql/computation/libessentials-minikql-computation.a yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a yql/essentials/core/file_storage/download/libcore-file_storage-download.a yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a yql/essentials/core/file_storage/libessentials-core-file_storage.a yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a yql/essentials/parser/common/libessentials-parser-common.a contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a yql/essentials/core/minsketch/libessentials-core-minsketch.a 
yql/essentials/core/histogram/libessentials-core-histogram.a library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a library/cpp/deprecated/split/libcpp-deprecated-split.a yql/essentials/sql/settings/libessentials-sql-settings.a yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a yql/essentials/sql/libyql-essentials-sql.a yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a yql/essentials/core/libyql-essentials-core.a yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a yql/essentials/core/type_ann/libessentials-core-type_ann.a yql/essentials/providers/common/mkql/libproviders-common-mkql.a yql/essentials/public/result_format/libessentials-public-result_format.a yql/essentials/providers/common/codec/libproviders-common-codec.a ydb/library/mkql_proto/libydb-library-mkql_proto.a ydb/core/engine/libydb-core-engine.a ydb/core/docapi/libydb-core-docapi.a ydb/public/api/grpc/draft/libapi-grpc-draft.a ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a ydb/library/yql/dq/actors/libyql-dq-actors.a ydb/core/kqp/common/simple/libkqp-common-simple.a yql/essentials/providers/common/provider/libproviders-common-provider.a yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a ydb/library/conclusion/libydb-library-conclusion.a ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a ydb/library/arrow_kernels/libydb-library-arrow_kernels.a yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a ydb/library/formats/arrow/liblibrary-formats-arrow.a ydb/library/yql/dq/common/libyql-dq-common.a ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a yql/essentials/ast/serialize/libessentials-ast-serialize.a yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a ydb/library/yql/dq/runtime/libyql-dq-runtime.a ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a ydb/core/kqp/query_data/libcore-kqp-query_data.a yql/essentials/core/common_opt/libessentials-core-common_opt.a yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a yql/essentials/core/services/libessentials-core-services.a library/cpp/streams/zstd/libcpp-streams-zstd.a ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a 
ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a ydb/services/metadata/optimization/libservices-metadata-optimization.a library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a yql/essentials/providers/common/config/libproviders-common-config.a yql/essentials/providers/common/gateway/libproviders-common-gateway.a yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a yql/essentials/providers/result/provider/libproviders-result-provider.a yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a yql/essentials/sql/v1/lexer/libsql-v1-lexer.a yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a yql/essentials/parser/common/antlr4/libparser-common-antlr4.a yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a yql/essentials/sql/v1/libessentials-sql-v1.a yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a ydb/core/kqp/provider/libcore-kqp-provider.a ydb/core/client/minikql_compile/libcore-client-minikql_compile.a ydb/core/formats/libydb-core-formats.a ydb/core/engine/minikql/libcore-engine-minikql.a contrib/libs/pcre/libcontrib-libs-pcre.a contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a 
library/cpp/regex/pcre/libcpp-regex-pcre.a library/cpp/scheme/liblibrary-cpp-scheme.a ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a yql/essentials/public/udf/support/libpublic-udf-support.a contrib/restricted/boost/program_options/librestricted-boost-program_options.a contrib/restricted/dragonbox/libdragonbox.a contrib/libs/expat/libcontrib-libs-expat.a contrib/libs/poco/Foundation/liblibs-poco-Foundation.a contrib/libs/poco/JSON/liblibs-poco-JSON.a contrib/libs/poco/XML/liblibs-poco-XML.a contrib/libs/poco/Util/liblibs-poco-Util.a contrib/libs/poco/Net/liblibs-poco-Net.a contrib/libs/poco/Crypto/liblibs-poco-Crypto.a contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a contrib/libs/fmt/libcontrib-libs-fmt.a contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a contrib/libs/lzma/libcontrib-libs-lzma.a contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a contrib/libs/apache/avro/liblibs-apache-avro.a ydb/library/yql/providers/s3/events/libproviders-s3-events.a ydb/library/yql/providers/s3/common/libproviders-s3-common.a library/cpp/threading/atomic/libcpp-threading-atomic.a yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a contrib/libs/libxml/libcontrib-libs-libxml.a library/cpp/xml/init/libcpp-xml-init.a library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a library/cpp/xml/document/libcpp-xml-document.a ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a yql/essentials/utils/threading/libessentials-utils-threading.a ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a yql/essentials/minikql/datetime/libessentials-minikql-datetime.a ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a ydb/core/external_sources/libydb-core-external_sources.a ydb/core/filestore/core/libcore-filestore-core.a ydb/core/metering/libydb-core-metering.a ydb/core/kesus/tablet/libcore-kesus-tablet.a ydb/core/keyvalue/protos/libcore-keyvalue-protos.a ydb/core/tx/libydb-core-tx.a library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a library/cpp/bit_io/liblibrary-cpp-bit_io.a library/cpp/packers/liblibrary-cpp-packers.a library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a library/cpp/containers/comptrie/libcpp-containers-comptrie.a library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a library/cpp/compproto/liblibrary-cpp-compproto.a library/cpp/comptable/liblibrary-cpp-comptable.a 
library/cpp/codecs/liblibrary-cpp-codecs.a library/cpp/messagebus/actor/libmessagebus_actor.a library/cpp/messagebus/config/libcpp-messagebus-config.a library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a library/cpp/messagebus/liblibrary-cpp-messagebus.a library/cpp/messagebus/protobuf/libmessagebus_protobuf.a ydb/public/lib/base/libpublic-lib-base.a ydb/core/keyvalue/libydb-core-keyvalue.a ydb/core/persqueue/public/counters/libpersqueue-public-counters.a ydb/core/persqueue/events/libcore-persqueue-events.a ydb/core/persqueue/public/libcore-persqueue-public.a ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a ydb/public/lib/deprecated/client/liblib-deprecated-client.a ydb/public/lib/value/libpublic-lib-value.a ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a ydb/core/persqueue/writer/libcore-persqueue-writer.a ydb/core/resource_pools/libydb-core-resource_pools.a ydb/library/query_actor/libydb-library-query_actor.a ydb/core/sys_view/common/libcore-sys_view-common.a ydb/core/kqp/common/compilation/libkqp-common-compilation.a ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a ydb/core/kqp/common/events/libkqp-common-events.a ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a ydb/core/formats/arrow/switch/libformats-arrow-switch.a ydb/library/accessor/libydb-library-accessor.a ydb/services/metadata/abstract/libservices-metadata-abstract.a ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a ydb/core/formats/arrow/common/libformats-arrow-common.a ydb/core/formats/arrow/reader/libformats-arrow-reader.a ydb/core/formats/arrow/hash/libformats-arrow-hash.a ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a ydb/library/signals/libydb-library-signals.a ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a ydb/core/formats/arrow/rows/libformats-arrow-rows.a ydb/core/formats/arrow/libcore-formats-arrow.a yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a 
contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a 
yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a ydb/core/formats/arrow/program/libformats-arrow-program.a ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a ydb/core/tx/columnshard/common/libtx-columnshard-common.a ydb/core/tx/sharding/libcore-tx-sharding.a yql/essentials/core/dq_integration/libessentials-core-dq_integration.a ydb/core/kqp/common/libcore-kqp-common.a ydb/core/kqp/common/buffer/libkqp-common-buffer.a ydb/core/ydb_convert/libydb-core-ydb_convert.a ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a ydb/core/kqp/runtime/libcore-kqp-runtime.a ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a library/cpp/json/yson/libcpp-json-yson.a ydb/core/backup/common/proto/libbackup-common-proto.a ydb/core/backup/common/libcore-backup-common.a ydb/core/change_exchange/libydb-core-change_exchange.a ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a ydb/library/range_treap/libydb-library-range_treap.a ydb/core/tx/locks/libcore-tx-locks.a ydb/library/chunks_limiter/libydb-library-chunks_limiter.a ydb/library/yql/dq/actors/common/libdq-actors-common.a ydb/library/yql/dq/tasks/libyql-dq-tasks.a ydb/core/quoter/public/libcore-quoter-public.a ydb/library/yql/dq/actors/compute/libdq-actors-compute.a ydb/services/lib/sharding/libservices-lib-sharding.a ydb/core/tx/datashard/libcore-tx-datashard.a ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a library/cpp/openssl/crypto/libcpp-openssl-crypto.a ydb/core/blob_depot/agent/libcore-blob_depot-agent.a ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a ydb/core/blobstorage/common/libcore-blobstorage-common.a ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a 
ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a ydb/core/cms/console/util/libcms-console-util.a ydb/core/config/validation/libcore-config-validation.a contrib/libs/libfyaml/libcontrib-libs-libfyaml.a ydb/library/fyamlcpp/libydb-library-fyamlcpp.a ydb/library/yaml_config/protos/libyaml-config-protos.a contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a ydb/library/yaml_config/public/liblibrary-yaml_config-public.a ydb/library/yaml_json/libydb-library-yaml_json.a ydb/library/yaml_config/libydb-library-yaml_config.a ydb/apps/version/libversion_definition.a ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a ydb/core/tx/scheme_board/libcore-tx-scheme_board.a ydb/core/util/actorsys_test/libcore-util-actorsys_test.a ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a ydb/core/mind/hive/libcore-mind-hive.a ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a ydb/core/tx/tiering/tier/libtx-tiering-tier.a ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a 
ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a library/cpp/string_utils/csv/libcpp-string_utils-csv.a ydb/public/lib/json_value/libpublic-lib-json_value.a ydb/public/lib/yson_value/libpublic-lib-yson_value.a ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a ydb/library/arrow_parquet/libydb-library-arrow_parquet.a ydb/library/plan2svg/libydb-library-plan2svg.a ydb/public/lib/ydb_cli/common/ini_config/libini_config.a ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a ydb/public/lib/ydb_cli/common/libcommon.a yql/essentials/sql/v1/format/libsql-v1-format.a ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a ydb/core/tx/schemeshard/libcore-tx-schemeshard.a ydb/core/fq/libs/protos/libfq-libs-protos.a ydb/core/fq/libs/grpc/libfq-libs-grpc.a yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a yql/essentials/core/langver/libessentials-core-langver.a yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a yql/essentials/providers/common/activation/libproviders-common-activation.a yql/essentials/providers/config/libessentials-providers-config.a yql/essentials/core/facade/libessentials-core-facade.a library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a ydb/library/yql/dq/transform/libyql-dq-transform.a yql/essentials/providers/common/metrics/libproviders-common-metrics.a yql/essentials/providers/common/transform/libproviders-common-transform.a ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a ydb/library/yql/providers/dq/common/libproviders-dq-common.a ydb/library/yql/providers/dq/config/libproviders-dq-config.a ydb/library/yql/dq/opt/libyql-dq-opt.a ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a 
ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a yql/essentials/core/user_data/libessentials-core-user_data.a yql/essentials/core/services/mounts/libcore-services-mounts.a ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a ydb/library/yql/utils/actors/libyql-utils-actors.a ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a ydb/core/fq/libs/events/libfq-libs-events.a ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a ydb/core/fq/libs/common/libfq-libs-common.a ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a ydb/library/db_pool/protos/liblibrary-db_pool-protos.a ydb/library/logger/libydb-library-logger.a ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a library/cpp/json/easy_parse/libcpp-json-easy_parse.a library/cpp/protobuf/interop/libcpp-protobuf-interop.a ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a yql/essentials/providers/common/dq/libproviders-common-dq.a ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a ydb/library/yql/utils/plan/libyql-utils-plan.a ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a ydb/library/yql/providers/pq/common/libproviders-pq-common.a ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a 
ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a library/cpp/type_info/liblibrary-cpp-type_info.a library/cpp/yt/logging/libcpp-yt-logging.a contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a contrib/libs/farmhash/libcontrib-libs-farmhash.a contrib/deprecated/yajl/libcontrib-deprecated-yajl.a library/cpp/threading/skip_list/libcpp-threading-skip_list.a library/cpp/threading/thread_local/libcpp-threading-thread_local.a library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a library/cpp/yt/threading/libcpp-yt-threading.a library/cpp/yt/global/libcpp-yt-global.a library/cpp/yt/error/libcpp-yt-error.a library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a library/cpp/ytalloc/api/libcpp-ytalloc-api.a yt/yt/build/libyt-yt-build.a yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a yt/yt_proto/yt/core/libyt_proto-yt-core.a library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a library/cpp/yt/backtrace/libcpp-yt-backtrace.a yt/yt/library/profiling/libyt-library-profiling.a yt/yt/library/undumpable/libyt-library-undumpable.a yt/yt/library/ytprof/api/liblibrary-ytprof-api.a yt/yt/library/procfs/libyt-library-procfs.a yt/yt/library/signals/libyt-library-signals.a yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a yt/yt/library/tracing/libyt-library-tracing.a yt/yt/library/numeric/libyt-library-numeric.a library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a yt/yt/core/libyt-yt-core.a yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a yt/yt_proto/yt/formats/libyt_proto-yt-formats.a yt/yt/library/tvm/libyt-library-tvm.a yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a yt/cpp/mapreduce/common/libcpp-mapreduce-common.a library/cpp/skiff/liblibrary-cpp-skiff.a yt/cpp/mapreduce/io/libcpp-mapreduce-io.a yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a yt/yql/providers/yt/common/libproviders-yt-common.a yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a yt/yt/library/decimal/libyt-library-decimal.a yt/yql/providers/yt/codec/libproviders-yt-codec.a yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a library/cpp/sighandler/liblibrary-cpp-sighandler.a library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a library/cpp/threading/cron/libcpp-threading-cron.a contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a yt/yt/core/http/libyt-core-http.a yt/yt/core/https/libyt-core-https.a yt/cpp/mapreduce/http/libcpp-mapreduce-http.a yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a yt/yt/library/auth/libyt-library-auth.a yt/yt/library/erasure/libyt-library-erasure.a library/cpp/tdigest/liblibrary-cpp-tdigest.a yt/yt/library/quantile_digest/libyt-library-quantile_digest.a yt/yt/library/re2/libyt-library-re2.a yt/yt/library/tz_types/libyt-library-tz_types.a yt/yt_proto/yt/client/libyt_proto-yt-client.a library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a library/cpp/cron_expression/liblibrary-cpp-cron_expression.a yt/yt/client/libyt-yt-client.a yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a 
yt/cpp/mapreduce/client/libcpp-mapreduce-client.a yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a yt/yql/providers/yt/lib/hash/libyt-lib-hash.a yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a yql/essentials/providers/common/schema/libproviders-common-schema.a yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a yt/yql/providers/yt/lib/schema/libyt-lib-schema.a yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a yt/yql/providers/yt/opt/libproviders-yt-opt.a yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a yt/yql/providers/yt/proto/libproviders-yt-proto.a yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a yt/yql/providers/yt/provider/libproviders-yt-provider.a yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a yt/yql/providers/yt/job/libproviders-yt-job.a yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a yt/yql/providers/yt/lib/log/libyt-lib-log.a yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a yt/yql/providers/yt/gateway/native/libyt-gateway-native.a yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a ydb/core/kqp/federated_query/libcore-kqp-federated_query.a ydb/library/grpc/server/actors/libgrpc-server-actors.a contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a library/cpp/messagebus/www/libcpp-messagebus-www.a library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a ydb/core/backup/controller/libcore-backup-controller.a ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a ydb/core/blobstorage/other/libcore-blobstorage-other.a ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a ydb/core/client/metadata/libcore-client-metadata.a ydb/core/discovery/libydb-core-discovery.a ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a ydb/core/health_check/libydb-core-health_check.a ydb/core/kqp/session_actor/libcore-kqp-session_actor.a ydb/core/sys_view/auth/libcore-sys_view-auth.a ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a ydb/core/sys_view/nodes/libcore-sys_view-nodes.a ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a ydb/core/sys_view/sessions/libcore-sys_view-sessions.a 
ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a ydb/core/sys_view/show_create/libcore-sys_view-show_create.a ydb/core/sys_view/storage/libcore-sys_view-storage.a ydb/core/sys_view/tablets/libcore-sys_view-tablets.a ydb/core/sys_view/libydb-core-sys_view.a ydb/core/tx/data_events/common/libtx-data_events-common.a ydb/core/tx/data_events/libcore-tx-data_events.a ydb/public/api/client/nc_private/libapi-client-nc_private.a ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a ydb/library/ncloud/impl/liblibrary-ncloud-impl.a ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a ydb/core/grpc_caching/libydb-core-grpc_caching.a ydb/library/ycloud/impl/liblibrary-ycloud-impl.a ydb/core/security/libydb-core-security.a contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a contrib/libs/sasl/libcontrib-libs-sasl.a contrib/libs/openldap/libcontrib-libs-openldap.a ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a ydb/public/lib/fq/libpublic-lib-fq.a ydb/services/ext_index/common/libservices-ext_index-common.a ydb/core/grpc_services/libydb-core-grpc_services.a ydb/core/security/certificate_check/libcore-security-certificate_check.a ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a ydb/core/persqueue/common/libcore-persqueue-common.a ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a ydb/core/tx/replication/common/libtx-replication-common.a ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a ydb/core/tx/replication/service/libtx-replication-service.a ydb/core/backup/impl/libcore-backup-impl.a ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a 
ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a ydb/core/persqueue/libydb-core-persqueue.a ydb/core/cms/console/validators/libcms-console-validators.a ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a ydb/core/driver_lib/cli_base/libcli_base.a ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a ydb/core/config/init/libcore-config-init.a ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a ydb/core/mind/bscontroller/libcore-mind-bscontroller.a ydb/core/kqp/counters/libcore-kqp-counters.a ydb/core/sys_view/processor/libcore-sys_view-processor.a ydb/core/test_tablet/libydb-core-test_tablet.a ydb/library/table_creator/libydb-library-table_creator.a ydb/services/metadata/request/libservices-metadata-request.a ydb/services/metadata/initializer/libservices-metadata-initializer.a ydb/services/metadata/manager/libservices-metadata-manager.a ydb/services/metadata/libydb-services-metadata.a ydb/core/tx/replication/controller/libtx-replication-controller.a ydb/core/mind/libydb-core-mind.a ydb/core/cms/console/libcore-cms-console.a ydb/core/mind/address_classification/libcore-mind-address_classification.a library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a ydb/core/ymq/proto/libcore-ymq-proto.a ydb/library/http_proxy/error/liblibrary-http_proxy-error.a ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a ydb/core/ymq/base/libcore-ymq-base.a ydb/core/ymq/queues/common/libymq-queues-common.a ydb/core/ymq/actor/cfg/libymq-actor-cfg.a ydb/public/api/client/yc_public/events/libclient-yc_public-events.a ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a ydb/core/ymq/queues/fifo/libymq-queues-fifo.a ydb/core/ymq/queues/std/libymq-queues-std.a ydb/core/ymq/actor/libcore-ymq-actor.a ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a ydb/core/kqp/opt/logical/libkqp-opt-logical.a ydb/library/naming_conventions/libydb-library-naming_conventions.a ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a ydb/core/kqp/opt/physical/libkqp-opt-physical.a ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a ydb/core/kqp/opt/libcore-kqp-opt.a yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a yql/essentials/sql/v0/lexer/libsql-v0-lexer.a yql/essentials/sql/v0/libessentials-sql-v0.a ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a yql/essentials/providers/pg/provider/libproviders-pg-provider.a ydb/core/kqp/host/libcore-kqp-host.a ydb/core/kqp/compile_service/libcore-kqp-compile_service.a 
ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a ydb/core/fq/libs/metrics/libfq-libs-metrics.a ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a ydb/core/fq/libs/config/libfq-libs-config.a ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a ydb/library/db_pool/libydb-library-db_pool.a ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a ydb/core/fq/libs/ydb/libfq-libs-ydb.a ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a ydb/library/actors/log_backend/liblibrary-actors-log_backend.a ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a ydb/library/yql/dq/state/libyql-dq-state.a ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a ydb/core/kqp/rm_service/libcore-kqp-rm_service.a ydb/core/kqp/topics/libcore-kqp-topics.a ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a ydb/services/metadata/secret/libservices-metadata-secret.a ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a ydb/core/tx/tiering/libcore-tx-tiering.a ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a 
ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a ydb/core/statistics/database/libcore-statistics-database.a ydb/core/statistics/service/libcore-statistics-service.a ydb/core/kqp/gateway/libcore-kqp-gateway.a ydb/core/kqp/node_service/libcore-kqp-node_service.a ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a ydb/core/fq/libs/compute/common/liblibs-compute-common.a ydb/core/kqp/workload_service/libcore-kqp-workload_service.a ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a ydb/services/lib/actors/libservices-lib-actors.a ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a ydb/services/persqueue_v1/libydb-services-persqueue_v1.a ydb/core/client/server/libcore-client-server.a ydb/core/cms/libydb-core-cms.a ydb/core/control/libydb-core-control.a ydb/core/driver_lib/base_utils/libbase_utils.a ydb/core/driver_lib/cli_utils/libcli_utils.a ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a ydb/core/fq/libs/private_client/libfq-libs-private_client.a ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a ydb/core/fq/libs/hmac/libfq-libs-hmac.a ydb/core/fq/libs/signer/libfq-libs-signer.a ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a 
ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a ydb/core/fq/libs/actors/libfq-libs-actors.a ydb/core/fq/libs/audit/libfq-libs-audit.a ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a ydb/core/fq/libs/audit/events/liblibs-audit-events.a ydb/library/folder_service/libydb-library-folder_service.a ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a ydb/core/fq/libs/gateway/libfq-libs-gateway.a ydb/core/fq/libs/health/libfq-libs-health.a ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a ydb/core/fq/libs/init/libfq-libs-init.a ydb/core/fq/libs/logs/libfq-libs-logs.a ydb/core/graph/shard/libcore-graph-shard.a ydb/services/ydb/libydb-services-ydb.a ydb/services/datastreams/libydb-services-datastreams.a ydb/services/ymq/libydb-services-ymq.a ydb/core/http_proxy/libydb-core-http_proxy.a ydb/core/raw_socket/libydb-core-raw_socket.a ydb/core/kafka_proxy/libydb-core-kafka_proxy.a ydb/core/kesus/proxy/libcore-kesus-proxy.a ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a library/cpp/histogram/hdr/libcpp-histogram-hdr.a ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a ydb/core/tx/general_cache/service/libtx-general_cache-service.a ydb/core/tx/general_cache/source/libtx-general_cache-source.a ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a 
ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a 
ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a ydb/core/tx/program/libcore-tx-program.a ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a ydb/core/util/evlog/libcore-util-evlog.a ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a ydb/library/actors/testlib/common/libactors-testlib-common.a ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a 
ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a ydb/core/tx/priorities/usage/libtx-priorities-usage.a ydb/core/tx/priorities/service/libtx-priorities-service.a ydb/core/tx/time_cast/libcore-tx-time_cast.a ydb/core/tx/tracing/usage/libtx-tracing-usage.a ydb/core/tx/tracing/service/libtx-tracing-service.a ydb/core/tx/columnshard/libcore-tx-columnshard.a ydb/library/workload/abstract/liblibrary-workload-abstract.a ydb/library/workload/kv/liblibrary-workload-kv.a ydb/library/workload/stock/liblibrary-workload-stock.a ydb/services/kesus/libydb-services-kesus.a ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a ydb/core/load_test/libydb-core-load_test.a ydb/core/pgproxy/protos/libcore-pgproxy-protos.a ydb/core/pgproxy/libydb-core-pgproxy.a ydb/core/local_pgwire/libydb-core-local_pgwire.a ydb/core/log_backend/libydb-core-log_backend.a ydb/core/memory_controller/libydb-core-memory_controller.a ydb/core/public_http/protos/libcore-public_http-protos.a ydb/core/public_http/libydb-core-public_http.a ydb/core/quoter/libydb-core-quoter.a ydb/core/security/token_manager/libcore-security-token_manager.a ydb/core/statistics/aggregator/libcore-statistics-aggregator.a ydb/core/transfer/libydb-core-transfer.a ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a ydb/core/tx/conveyor/service/libtx-conveyor-service.a ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a ydb/core/tx/coordinator/public/libtx-coordinator-public.a ydb/core/tx/coordinator/libcore-tx-coordinator.a ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a ydb/core/tx/mediator/libcore-tx-mediator.a ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a library/cpp/archive/liblibrary-cpp-archive.a 
ydb/core/viewer/yaml/libcore-viewer-yaml.a ydb/services/lib/auth/libservices-lib-auth.a ydb/core/viewer/libydb-core-viewer.a ydb/core/ymq/http/libcore-ymq-http.a ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a ydb/services/auth/libydb-services-auth.a ydb/services/backup/libydb-services-backup.a ydb/services/bridge/libydb-services-bridge.a ydb/services/cms/libydb-services-cms.a ydb/services/config/libydb-services-config.a ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a ydb/services/discovery/libydb-services-discovery.a ydb/services/dynamic_config/libydb-services-dynamic_config.a ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a ydb/services/ext_index/metadata/libservices-ext_index-metadata.a contrib/restricted/boost/serialization/librestricted-boost-serialization.a contrib/restricted/boost/graph/librestricted-boost-graph.a contrib/libs/hyperscan/libcontrib-libs-hyperscan.a contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a ydb/services/ext_index/service/libservices-ext_index-service.a ydb/services/fq/libydb-services-fq.a ydb/services/keyvalue/libydb-services-keyvalue.a ydb/services/local_discovery/libydb-services-local_discovery.a ydb/services/maintenance/libydb-services-maintenance.a ydb/services/metadata/common/libservices-metadata-common.a ydb/services/metadata/ds_table/libservices-metadata-ds_table.a ydb/services/monitoring/libydb-services-monitoring.a ydb/services/rate_limiter/libydb-services-rate_limiter.a ydb/services/replication/libydb-services-replication.a ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a ydb/services/tablet/libydb-services-tablet.a ydb/services/view/libydb-services-view.a yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a ydb/library/signal_backtrace/libydb-library-signal_backtrace.a ydb/core/driver_lib/run/librun.a ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a ydb/public/lib/experimental/libpublic-lib-experimental.a ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a ydb/core/fq/libs/mock/libfq-libs-mock.a ydb/library/actors/testlib/liblibrary-actors-testlib.a ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a ydb/core/testlib/actors/libcore-testlib-actors.a ydb/core/testlib/basics/libcore-testlib-basics.a ydb/library/folder_service/mock/liblibrary-folder_service-mock.a ydb/core/testlib/libydb-core-testlib.a library/cpp/string_utils/base32/libcpp-string_utils-base32.a 
yql/essentials/udfs/common/math/lib/libcommon-math-lib.a library/cpp/unicode/normalization/libcpp-unicode-normalization.a library/cpp/unicode/set/libcpp-unicode-set.a yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a contrib/libs/highwayhash/libcontrib-libs-highwayhash.a ydb/core/kqp/ut/common/libkqp-ut-common.a yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a ydb/core/persqueue/ut/common/libpersqueue-ut-common.a ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a -Wl,--end-group -rdynamic -ldl -lrt -Wl,--no-as-needed -Wl,--gdb-index -fuse-ld=lld --ld-path=/home/runner/.ya/tools/v4/9766798141/bin/ld.lld -Wl,--no-rosegment -Wl,--build-id=sha1 -lrt -ldl -lutil -nodefaultlibs -lpthread -lc -lm -Wl,--gc-sections -Wl,-no-pie failed with exit code 1 in /home/runner/.ya/build/build_root/endf/0039dc ld.lld: error: undefined symbol: void Out>, std::__y1::allocator>>>>(IOutputStream&, TTypeTraits>, std::__y1::allocator>>>>::TFuncParam) >>> referenced by output.h:238 (/-S/util/stream/output.h:238) >>> /home/runner/.ya/build/build_root/endf/0039dc/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp.o:(NTestSuiteFulltextIndexBuildTest::TTestCaseBasic::Execute_(NUnitTest::TTestContext&)) clang++: error: linker command failed with exit code 1 (use -v to see invocation) >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx2 [GOOD] >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> CommitOffset::Commit_WithWrongSession_ToParent [GOOD] >> CommitOffset::Commit_WithoutSession_ParentNotFinished >> TSharedPageCache::MiddleCache_BTreeIndex >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> TPDiskTest::DeviceHaltTooLong [GOOD] >> TPDiskTest::ChangePDiskKey >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> TPDiskTest::ChangePDiskKey [GOOD] >> TPDiskTest::FailedToFormatDiskInfoUpdate >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> TYdbControlPlaneStoragePipeline::ShouldCheckAutomaticTtl [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist >> TYdbControlPlaneStorageModifyConnection::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed >> TYdbControlPlaneStorageListConnections::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility >> TPDiskTest::FailedToFormatDiskInfoUpdate [GOOD] >> TPDiskTest::ChangeExpectedSlotCount >> 
TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> KqpStats::SysViewClientLost >> TYdbControlPlaneStoragePipeline::ShouldCheckChangeAutomaticTtl [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> KqpExplain::UpdateConditional-UseSink >> TYdbControlPlaneStorageDescribeBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Disabling >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionFailed [GOOD] >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics >> TYdbControlPlaneStorageListQueries::ShouldFilterMode [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested >> 
TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive [GOOD] >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TYdbControlPlaneStoragePipeline::ShouldCheckResultsTTL [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> TPDiskTest::ChangeExpectedSlotCount [GOOD] >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScope [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionWrite [GOOD] >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] >> TPDiskTest::ChunkWriteDifferentOffsetAndSize >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TPDiskTest::ChunkWriteDifferentOffsetAndSize [GOOD] >> TPDiskTest::ChunkWriteBadOffset [GOOD] >> TPDiskTest::CheckChunkReadOperationPriorities >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> TYdbControlPlaneStoragePipeline::ShouldCheckDisableCurrentIamGetTask [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListConnections::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> 
KqpExplain::UpdateConditionalKey-UseSink >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv [GOOD] >> TPart::PageFailEnvColumnGroups >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TYdbControlPlaneStorageDeleteQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldEmptyPageToken [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageListQueries::ShouldFilterVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx3 [GOOD] >> TYdbControlPlaneStoragePipeline::ShouldReturnPartialBatchForGetTask [GOOD] >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> TopicTimestamp::TimestampRead_1MB_Topic_offset+middle [GOOD] >> TopicTimestamp::TimestampRead_6MB_LB_exact >> KqpExplain::UpdateOn+UseSink [GOOD] >> KqpExplain::UpdateOn-UseSink >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> CommitOffset::Commit_Flat_WithWrongSession >> TPDiskTest::CheckChunkReadOperationPriorities [GOOD] >> TPDiskTest::CheckChunkWriteOperationPriorities >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> TopicDescribe::BasicStartOffset [GOOD] >> TopicDescribe::CompactionPreserveStartOffset >> TYdbControlPlaneStorageListConnections::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> KqpExplain::UpdateOn-UseSink [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TYdbControlPlaneStorageModifyConnection::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> 
TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TYdbControlPlaneStorageListQueries::ShouldFilterAutomatic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty >> TYdbControlPlaneStorageModifyBinding::ShouldCheckModifyTheSame [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser >> TYdbControlPlaneStorageListConnectionsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByName [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic >> TYdbControlPlaneStorageDeleteBindingPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TPDiskTest::CheckChunkWriteOperationPriorities [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot >> TYdbControlPlaneStorageModifyConnection::ShouldCheckWithoutIdempotencyKey [GOOD] >> 
TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> CommitOffset::Commit_WithoutSession_ParentNotFinished [GOOD] >> CommitOffset::Commit_WithoutSession_ToPastParentPartition >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByMe [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> TYdbControlPlaneStorageCreateConnection::ShouldCheckCommitTransactionReadWrite [GOOD] >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled >> TYdbControlPlaneStorageListConnections::ShouldCombineFilters [GOOD] >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyBinding::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey >> TYdbControlPlaneStorageListBindings::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> TYdbControlPlaneStorageListConnections::ShouldCheckFilterByConnectionType [GOOD] >> TYdbControlPlaneStorageListQueriesPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic >> TYdbControlPlaneStorageModifyConnection::ShouldMoveFromScopeToPrivateWithError [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty >> CommitOffset::Commit_Flat_WithWrongSession [GOOD] >> CommitOffset::Commit_Flat_WithWrongSession_ToPast >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TYdbControlPlaneStorageListJobsPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK >> 
TYdbControlPlaneStorageModifyBinding::ShouldCheckAllowedSymbolsName [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckWithoutIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionEmpty [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic >> TImportWithRebootsTests::ShouldSucceedOnSingleTopic [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckExist [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TYdbControlPlaneStorageListQueries::ShouldCheckLimit [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] >> TopicDescribe::CompactionPreserveStartOffset [GOOD] >> TopicDescribe::RetentionChangesStartOffset >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TYdbControlPlaneStorageModifyBinding::ShouldCheckIdempotencyKey [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp 
[GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPublic [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> TYdbControlPlaneStorageListQueries::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility >> TYdbControlPlaneStorageListBindings::ShouldCheckScopeVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility >> CommitOffset::Commit_Flat_WithWrongSession_ToPast [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession >> TYdbControlPlaneStorageModifyBinding::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TYdbControlPlaneStorageListQueries::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser >> CommitOffset::Commit_WithoutSession_ToPastParentPartition [GOOD] >> CommitOffset::PartitionSplit_OffsetCommit >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> TYdbControlPlaneStorageModifyBinding::ShouldCheckMoveToScopeWithPrivateConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivate [GOOD] >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic >> TImportWithRebootsTests::ShouldSucceedOnIndexedTable [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnDependentView >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> TYdbControlPlaneStorageListQueries::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters >> TVersions::Wreck0Reverse [GOOD] >> TYdbControlPlaneStorageListQueries::ShouldCombineFilters [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckPrivateVisibility [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreateScopeeBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> TopicTimestamp::TimestampRead_6MB_LB_exact [GOOD] >> TopicTimestamp::TimestampRead_40MB_LegacyTopic_exact >> TopicDescribe::RetentionChangesStartOffset [GOOD] >> TopicTimestamp::TimestampRead_1MB_LegacyTopic_exact >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> TYdbControlPlaneStorageModifyConnectionPermissions::ShouldApplyPermissionViewPrivatePublic [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateBindingWithUnavailableConnection [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPermission [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist >> TYdbControlPlaneStorageModifyQuery::ShouldCheckExist [GOOD] >> 
TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey >> TYdbControlPlaneStorageModifyBinding::ShouldNotCreatePrivateConnectionWithDesctructionBinding [GOOD] >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes >> TYdbControlPlaneStorageModifyQuery::ShouldCheckIdempotencyKey [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable >> TYdbControlPlaneStorageListBindings::ShouldCheckSuperUser [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId >> TYdbControlPlaneStorageModifyBinding::ShouldCheckObjectStorageProjectionByTypes [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionFailed [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckPreviousRevisionSuccess [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName >> TYdbControlPlaneStorageListBindings::ShouldCheckFilterByConnectionId [GOOD] >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters >> TYdbControlPlaneStorageModifyQuery::ShouldCheckQueryName [GOOD] >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections >> TYdbControlPlaneStorageListBindings::ShouldCombineFilters [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> TYdbControlPlaneStorageModifyQuery::ShouldCheckAvailableConnections [GOOD] >> CommitOffset::PartitionSplit_OffsetCommit [GOOD] >> CommitOffset::DistributedTxCommit >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession >> TBsVDiskGC::GCPutKeepBarrierSync [FAIL] >> TBsVDiskGC::GCPutManyBarriersNoSync >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> TopicAutoscaling::OrderOfChildrenPartitions_Topic >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TopicTimestamp::TimestampRead_1MB_LegacyTopic_exact [GOOD] >> TopicTimestamp::TimestampRead_1MB_LB_exact >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession_ParentCommittedToEnd >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> CommitOffset::DistributedTxCommit [GOOD] >> CommitOffset::DistributedTxCommit_ChildFirst >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter >> 
TImportWithRebootsTests::ShouldSucceedOnSingleChangefeed [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnSingleTableWithUniqueIndex >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> TopicAutoscaling::OrderOfChildrenPartitions_Topic [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] >> TImportWithRebootsTests::ShouldSucceedOnDependentView [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnViewsAndTables >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TopicTimestamp::TimestampRead_40MB_LegacyTopic_exact [GOOD] >> TopicTimestamp::TimestampRead_40MB_LegacyTopic_offset+middle >> TopicTimestamp::TimestampRead_1MB_LB_exact [GOOD] >> TopicTimestamp::TimestampRead_1MB_LB_offset+middle >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession_ParentCommittedToEnd [GOOD] >> CommitOffset::Commit_WithSession_ToPastParentPartition >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> CommitOffset::DistributedTxCommit_ChildFirst [GOOD] >> CommitOffset::DistributedTxCommit_CheckSessionResetAfterCommit >> TopicAutoscaling::PartitionSplit_AutosplitByLoad_AfterAlter [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> CommitOffset::Commit_WithSession_ToPastParentPartition [GOOD] >> CommitOffset::Commit_FromSession_ToNewChild_WithoutCommitToParent >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> CommitOffset::DistributedTxCommit_CheckSessionResetAfterCommit [GOOD] >> CommitOffset::DistributedTxCommit_CheckOffsetCommitForDifferentCases >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> TopicTimestamp::TimestampRead_1MB_LB_offset+middle [GOOD] >> CommitOffset::Commit_FromSession_ToNewChild_WithoutCommitToParent [GOOD] >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TopicTimestamp::TimestampRead_40MB_LegacyTopic_offset+middle [GOOD] >> TopicTimestamp::TimestampRead_40MB_Topic_exact >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> 
TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> Self::Literals [GOOD] >> CommitOffset::DistributedTxCommit_CheckOffsetCommitForDifferentCases [GOOD] >> CommitOffset::DistributedTxCommit_Flat_CheckOffsetCommitForDifferentCases >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> CommitOffset::DistributedTxCommit_Flat_CheckOffsetCommitForDifferentCases [GOOD] >> CommitOffset::DistributedTxCommit_LongReadSession >> TopicTimestamp::TimestampRead_40MB_Topic_exact [GOOD] >> TopicTimestamp::TimestampRead_40MB_Topic_offset+middle >> TImportWithRebootsTests::ShouldSucceedOnSingleTableWithUniqueIndex [GOOD] >> CommitOffset::DistributedTxCommit_LongReadSession [GOOD] >> TopicTimestamp::TimestampRead_40MB_Topic_offset+middle [GOOD] >> TopicTimestamp::TimestampRead_40MB_LB_exact >> TopicTimestamp::TimestampRead_40MB_LB_exact [GOOD] >> TopicTimestamp::TimestampRead_40MB_LB_offset+middle >> TopicTimestamp::TimestampRead_40MB_LB_offset+middle [GOOD] Number of suites skipped due to a failed build: 455, skipped by size: 39 ------ sole chunk ran 7 tests (total:201.05s - test:200.80s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 18.4G (19300992K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 62302 45.1M 45.2M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 62776 30.8M 19.3M 7.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 62802 47.9M 47.9M 24.1M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff/test_tool.args 63173 18.1G 18.1G 18.3G └─ ydb-core-tablet_flat-ut_large --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/ytest.report.tra Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff/stderr Total 474 suites: 474 - GOOD Total 2886 tests: 2886 - GOOD SOME TESTS DIDN'T RUN DUE TO BUILD ERRORS Cache efficiency ratio is 89.12% (52329 of 58719). 
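
A note on the 455 suites skipped due to a failed build: the ld.lld failure earlier in this log is an undefined `Out<...>` symbol referenced from util/stream/output.h:238 in ut_fulltext_build.cpp.o. In this codebase that error typically appears when a test streams a value (for example through UNIT_ASSERT_VALUES_EQUAL) of a type that has no `Out<T>` specialization linked into the binary; the exact template argument is mangled in this log capture, so the type below is purely hypothetical. A minimal sketch of the usual shape of such a specialization, not the actual fix applied here:

    // Hypothetical example only: TMyValue stands in for the real type compared in
    // ut_fulltext_build.cpp, whose name is not recoverable from the log above.
    #include <util/generic/string.h>
    #include <util/stream/output.h>
    #include <util/system/types.h>

    struct TMyValue {
        TString Key;
        ui32 Count = 0;
    };

    // Providing this specialization gives the linker a definition for the
    // Out<TMyValue> symbol that operator<< in util/stream/output.h refers to.
    Y_DECLARE_OUT_SPEC(inline, TMyValue, out, value) {
        out << value.Key << '=' << value.Count;
    }

If the type is declared in another library, the specialization still has to live in a translation unit that ends up linked into the test binary, otherwise the same undefined-symbol error reappears.
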
Local: 0 (0.00%), dist: 9301 (15.84%), by dynamic uids: 0 (0.00%), avoided: 43028 (73.28%) Dist cache download: count=4668, size=13.6 GiB, speed=105.32 MiB/s Disk usage for tools/sdk at least 62.52 MiB Additional disk space consumed for build cache 229.74 GiB Critical path: [ 56050 ms] [CC] [9zDoFkYvXBGYElpE1pxPUQ tool]: $(BUILD_ROOT)/ydb/core/protos/config.pb.cc [started: 0 (1758814449310), finished: 56050 (1758814505360)] [ 232 ms] [AR] [vVsvsRL7M1SGSPBUU6QhXQ tool]: $(BUILD_ROOT)/ydb/core/protos/libydb-core-protos.a [started: 100594 (1758814549904), finished: 100826 (1758814550136)] [ 740 ms] [LD] [Vya3NfNuv5PsHsq0ZS_uog tool]: $(BUILD_ROOT)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen [started: 103996 (1758814553306), finished: 104736 (1758814554046)] [ 61 ms] [PR] [fcbyNLRRYYlhFZeviGxmBw default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/core/control/lib/generated/control_board_proto.h [started: 104820 (1758814554130), finished: 104881 (1758814554191)] [129173 ms] [CC] [4tOHJkxyrpYGT7Zh-bxCeA default-linux-x86_64 relwithdebinfo]: $(SOURCE_ROOT)/ydb/core/tx/schemeshard/schemeshard__init.cpp [started: 393516 (1758814842826), finished: 522689 (1758814971999)] [ 526 ms] [AR] [npxvSYA8fCZxO9pSvP0N8A default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a [started: 2454624 (1758816903934), finished: 2455150 (1758816904460)] [ 10121 ms] [LD] [2y7CBFYlcs4k0mlZ1n3BEw default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge [started: 2471550 (1758816920860), finished: 2481671 (1758816930981)] [602718 ms] [TM] [rnd-3715639153948442901 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest [started: 2484035 (1758816933345), finished: 3086753 (1758817536063)] Time from start: 3321905.5590820312 ms, time elapsed by graph 799621 ms, time diff 2522284.5590820312 ms. 
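
Regarding the 8.0G memory-limit warning for ydb/core/tablet_flat/ut_large above (peak usage around 18.4G): the log's own suggestion is to raise the suite's RAM requirement with REQUIREMENTS(ram:X) in its ya.make, where X is in GiB. An illustrative fragment, assuming the suite is declared via UNITTEST_FOR; the path layout, elided macros, and the chosen ram value are assumptions, not the actual file:

    # Illustrative ya.make fragment only, not the real ydb/core/tablet_flat/ut_large/ya.make.
    UNITTEST_FOR(ydb/core/tablet_flat)

    # ... SRCS() and the other macros of the real suite elided ...

    # Ask the test scheduler for enough RAM to cover the ~18.4G peak observed above.
    REQUIREMENTS(ram:20)

    END()
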
The longest 10 tasks: [602718 ms] [TM] [rnd-3715639153948442901 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest [started: 1758816933345, finished: 1758817536063] [482491 ms] [prepare:$(bazel-store-put)] local [count: 4236, cps: 8.78, ave time 113.90 msec] [479807 ms] [TM] [rnd-10865256916694109099 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/pdisk/ut/unittest [started: 1758816880711, finished: 1758817360518] [475003 ms] [TM] [rnd-9351100933029236701 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_vdisk/unittest [started: 1758816924145, finished: 1758817399148] [340098 ms] [TM] [rnd-10581466837278394503 default-linux-x86_64 relwithdebinfo]: ydb/core/erasure/ut/unittest [started: 1758814478347, finished: 1758814818445] [312260 ms] [CC] [vJqJEoV0FHfu_9hM5hijDw default-linux-x86_64 relwithdebinfo]: $(SOURCE_ROOT)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp [started: 1758816746136, finished: 1758817058396] [311071 ms] [TM] [rnd-8287235788666623904 default-linux-x86_64 relwithdebinfo]: ydb/core/tx/schemeshard/ut_restore/unittest [started: 1758817181800, finished: 1758817492871] [304409 ms] [TM] [rnd-12888546487179884933 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/dsproxy/ut_strategy/unittest [started: 1758816888959, finished: 1758817193368] [304052 ms] [TM] [rnd-10940317838413163000 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/vdisk/repl/ut/unittest [started: 1758816929549, finished: 1758817233601] [290977 ms] [TM] [rnd-13098862026932431359 default-linux-x86_64 relwithdebinfo]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1758817175355, finished: 1758817466332] Total time by type: [154796938 ms] [CC] [count: 3438, ave time 45025.29 msec] [ 27381000 ms] [TM] [count: 1357, ave time 20177.60 msec] [ 14480584 ms] [prepare:get from dist cache] [count: 9301, ave time 1556.88 msec] [ 7118712 ms] [prepare:put to dist cache] [count: 4387, ave time 1622.68 msec] [ 4684100 ms] [LD] [count: 376, ave time 12457.71 msec] [ 1956389 ms] [TS] [count: 494, ave time 3960.30 msec] [ 800679 ms] [prepare:bazel-store] [count: 3, ave time 266893.00 msec] [ 303046 ms] [prepare:tools] [count: 21, ave time 14430.76 msec] [ 266450 ms] [prepare:put into local cache, clean build dir] [count: 9071, ave time 29.37 msec] [ 200479 ms] [TA] [count: 71, ave time 2823.65 msec] [ 195938 ms] [prepare:AC] [count: 4, ave time 48984.50 msec] [ 103348 ms] [AR] [count: 527, ave time 196.11 msec] [ 37717 ms] [PY] [count: 22, ave time 1714.41 msec] [ 14473 ms] [PB] [count: 23, ave time 629.26 msec] [ 1943 ms] [EN] [count: 40, ave time 48.58 msec] [ 1183 ms] [BN] [count: 7, ave time 169.00 msec] [ 721 ms] [PR] [count: 19, ave time 37.95 msec] [ 687 ms] [prepare:resources] [count: 2, ave time 343.50 msec] [ 616 ms] [PK] [count: 2, ave time 308.00 msec] [ 550 ms] [BI] [count: 2, ave time 275.00 msec] [ 517 ms] [SB] [count: 2, ave time 258.50 msec] [ 443 ms] [PD] [count: 2, ave time 221.50 msec] [ 398 ms] [UN] [count: 3, ave time 132.67 msec] [ 233 ms] [CF] [count: 3, ave time 77.67 msec] [ 176 ms] [CP] [count: 2, ave time 88.00 msec] [ 28 ms] [prepare:clean] [count: 3, ave time 9.33 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 29537868 ms (15.61%) Total run tasks time - 189181224 ms Configure time - 44.4 s Statistics overhead 1952 ms Warn: Test [project=ydb/core/external_sources/object_storage/inference/ut, name=gtest] (uid=rnd-vffcxbmfpp86q9x3): Infrastructure error - contact devtools@ for 
details. Suite build deps: [40zmIODTK7Ycp5o72HMZQQ {'project_path': 'ydb/core/external_sources/object_storage/inference/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/gateway/ut, name=gtest] (uid=rnd-jf49asylwxtni3iy): Infrastructure error - contact devtools@ for details. Suite build deps: [-k1g4wzzukjK2T1Bn04cPw {'project_path': 'ydb/core/kqp/gateway/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/basic_example, name=gtest] (uid=rnd-1f2ub70iy9i70ouz): Infrastructure error - contact devtools@ for details. Suite build deps: [EBcRIU702A0j0W9DXXb2Wg {'project_path': 'ydb/public/sdk/cpp/tests/integration/basic_example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/bulk_upsert, name=gtest] (uid=rnd-xpxojqnqdru7fehk): Infrastructure error - contact devtools@ for details. Suite build deps: [8UoiqpDSHEUB1kMfAGCIjw {'project_path': 'ydb/public/sdk/cpp/tests/integration/bulk_upsert', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/server_restart, name=gtest] (uid=rnd-jyrxd8o2rxe5pgcx): Infrastructure error - contact devtools@ for details. Suite build deps: [45mRXg-JkXZFDJoXFeQXTw {'project_path': 'ydb/public/sdk/cpp/tests/integration/server_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/sessions, name=gtest] (uid=rnd-5sk9i9w1hq6yvmlk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcrU_es_tU5qXGu0wjZtVQ {'project_path': 'ydb/public/sdk/cpp/tests/integration/sessions', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/sessions_pool, name=gtest] (uid=rnd-a3novysehk2o0c8j): Infrastructure error - contact devtools@ for details. Suite build deps: [-LFYnZpw5iSKrsZ0AKkvAA {'project_path': 'ydb/public/sdk/cpp/tests/integration/sessions_pool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/topic, name=gtest] (uid=rnd-5dhsrqsjjpav4g5v): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yOq7phgMzDQr_1vn5MuZ6A {'project_path': 'ydb/public/sdk/cpp/tests/integration/topic', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/topic/with_direct_read, name=gtest] (uid=rnd-mbqo5r50dabz78ac): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ilBj_nObxXU6emREGvsDug {'project_path': 'ydb/public/sdk/cpp/tests/integration/topic/with_direct_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/tests, name=py3test] (uid=rnd-4uk24exhuikajp60): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [olgfAqGEjtLFkEjPOFeyIQ {'project_path': 'ydb/core/viewer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/add_column, name=py3test] (uid=rnd-0eqrc3dw2m81tppc): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ayx8XGla8SIcK_sY9t2yIw {'project_path': 'ydb/tests/datashard/add_column', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/async_replication, name=py3test] (uid=rnd-6ihinte7xxkdlxyj): Infrastructure error - contact devtools@ for details. Suite build deps: [4AuU0L8ovKb_CS09YCFeCA {'project_path': 'ydb/tests/datashard/async_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/copy_table, name=py3test] (uid=rnd-21o224ji4z9orodu): Infrastructure error - contact devtools@ for details. Suite build deps: [4IdQjBHMBOMpqEbA3nFG6g {'project_path': 'ydb/tests/datashard/copy_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/dml, name=py3test] (uid=rnd-us0s6kwxmwanpwo5): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PgHCMk5WDQVgay0DqJYKlg {'project_path': 'ydb/tests/datashard/dml', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/dump_restore, name=py3test] (uid=rnd-j24je3toy91hf0uk): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [g_ICgH-RDb3psFGDgNHwow {'project_path': 'ydb/tests/datashard/dump_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/parametrized_queries, name=py3test] (uid=rnd-rrzue70629t699g5): Infrastructure error - contact devtools@ for details. Suite build deps: [HT5Ims0WDa7mRdR43K1t5A {'project_path': 'ydb/tests/datashard/parametrized_queries', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/partitioning, name=py3test] (uid=rnd-1xt6uouqnppi27g7): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yPJkm5SFCpqFYg66vKb8iA {'project_path': 'ydb/tests/datashard/partitioning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/datashard/s3, name=py3test] (uid=rnd-ek9mqiwx4vmuj9je): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [guIwuJOad3W9i-WWFPiA3g {'project_path': 'ydb/tests/datashard/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/secondary_index, name=py3test] (uid=rnd-h7r3rq7liopdtf6i): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [UN3Z4X32s-sgtvUlwUUgpg {'project_path': 'ydb/tests/datashard/secondary_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/select, name=py3test] (uid=rnd-8exn9gsvip6zfkzz): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uaJybSHWpfKsTpsIDli7rQ {'project_path': 'ydb/tests/datashard/select', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/datashard/split_merge, name=py3test] (uid=rnd-7z4xzrg9lc3u9yig): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [QF5DffHKQ00sJEoggVMtTw {'project_path': 'ydb/tests/datashard/split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/ttl, name=py3test] (uid=rnd-jvzwzf6aiur1pnco): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Y6yKNwK9PavSqjxLMGIXEg {'project_path': 'ydb/tests/datashard/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/vector_index/medium, name=py3test] (uid=rnd-c83m3rflllz0xsfu): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u1JyxZVpnF1534lS5K7VJQ {'project_path': 'ydb/tests/datashard/vector_index/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/example, name=py3test] (uid=rnd-19mushrfxvco2x87): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lwWq6TJNS3N78mAwjI9Mmg {'project_path': 'ydb/tests/example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/common, name=py3test] (uid=rnd-2vml41rq8es8o0be): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WkgDriT61HCMF_TvwH89yQ {'project_path': 'ydb/tests/fq/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/http_api, name=py3test] (uid=rnd-r2qcz2fqmlsarj00): Infrastructure error - contact devtools@ for details. 
Suite build deps: [AmP9R1q2ZjYyHQRtlMqqyQ {'project_path': 'ydb/tests/fq/http_api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/mem_alloc, name=py3test] (uid=rnd-ex84trkhjlupzrq1): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vd9PbooFChU8NRzM3oXRWQ {'project_path': 'ydb/tests/fq/mem_alloc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/multi_plane, name=py3test] (uid=rnd-ncnx7ecuw62klkow): Infrastructure error - contact devtools@ for details. Suite build deps: [80EKhPAiZLgqfmJLolavyQ {'project_path': 'ydb/tests/fq/multi_plane', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/plans, name=py3test] (uid=rnd-pbpdvuwnqmute7vd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [EAwgvmj8cC-V5e0pFFAVKQ {'project_path': 'ydb/tests/fq/plans', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/restarts, name=py3test] (uid=rnd-mgiohxbekv14j23m): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [scPYvT-oTX2h4T42fGg7Xg {'project_path': 'ydb/tests/fq/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/s3, name=py3test] (uid=rnd-nurkzcqv0ag4l9my): Infrastructure error - contact devtools@ for details. Suite build deps: [8eon1HDZ-btTtkjtQqnL5g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/solomon, name=py3test] (uid=rnd-e22xyojctn8ozzg2): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [47L8WBzgkkJikv5HXYyLRw {'project_path': 'ydb/tests/fq/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/streaming, name=py3test] (uid=rnd-sbqlxc74u8erfnyz): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cgTaSZcQ-pmpIpOqbreTjw {'project_path': 'ydb/tests/fq/streaming', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/streaming_optimize, name=py3test] (uid=rnd-2cuu7qes6axlri5b): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7zAS-SOnP7yW-m0yXTooHw {'project_path': 'ydb/tests/tools/fqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WCV7s7e1XRGC3Sf5Fvy9yA {'project_path': 'yql/essentials/tools/sql2yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [XvHME7g29u0zeXwTHvjmPw {'project_path': 'ydb/tests/fq/streaming_optimize', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yds, name=py3test] (uid=rnd-cgolqo1wywhny13j): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lH8UONP_kSZg2f6QHRU5uw {'project_path': 'ydb/tests/fq/yds', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part0, name=py3test] (uid=rnd-0h1lfbuivu5j3vud): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [KvB0wWryVi4R9v4YIrLLtQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part1, name=py3test] (uid=rnd-wehd20y6u2mlhknh): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [M_ohdGjD4ax3aQoU7WFb6g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part10, name=py3test] (uid=rnd-h9z1c1iuom14z8l9): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [NRTPWWQ9uXgqpJ8orTx9jg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part11, name=py3test] (uid=rnd-surlkb1mkl8epexa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [kQy0Q54LSu2UDILJ3T_myg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part12, name=py3test] (uid=rnd-u5le88qqxcfdpy4b): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [2G00NM1F1n-2POpKbxeilg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part13, name=py3test] (uid=rnd-fmsgp393l11fooxm): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [gWuyy52rvCq-syaEgpnCsA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part14, name=py3test] (uid=rnd-yu1zrvdyzs4ttbia): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [KvH9qEW4v6X0rvmb_n17xQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part15, name=py3test] (uid=rnd-o0tim20z0ioo6w5q): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [HNYyA0TSLNYOw3xmhmICYg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part16, name=py3test] (uid=rnd-nyjdpohutcg4bh96): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [07dVKO5sh7aY9Y3_A858FQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part17, name=py3test] (uid=rnd-uvpszqj01xjilu4y): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GTwN6d-ENXnCgb71eYIN5Q {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part18, name=py3test] (uid=rnd-ury7gwrhvuchnp95): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GnNjxFmOGT5jbSg1hfHr3g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part19, name=py3test] (uid=rnd-y39na4to2it8egoq): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [hXGXWyZHexihs7dasKqtbw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part2, name=py3test] (uid=rnd-5vh467mbda3xejpd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [-pDeMZzNM1c2R6S07qExUQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part3, name=py3test] (uid=rnd-1mogrwgakxeuxzmy): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [Jgk6dV6zWR3bw9CBpTnxxA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part4, name=py3test] (uid=rnd-cykp15n1toti9k8z): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [8V2itxcbZPkHSca-g5WYJQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part5, name=py3test] (uid=rnd-5aaegb6cvwb1ttb1): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z0wDSPbuWcqXK-E8m7BBog {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part6, name=py3test] (uid=rnd-djuhtzsf8cyz6qtm): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vZGQjbcwHPg3bwqEkWxFNg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part7, name=py3test] (uid=rnd-xtniwl3p4dvov6w0): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [bcbKv8EE_9N7UUM1zsRYUQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part8, name=py3test] (uid=rnd-jspkr9iai3v4wh69): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [U8YsV8x93shMCN6IneZYOg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part9, name=py3test] (uid=rnd-o0hzb45knmnxkx2w): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [BPtiDP_04PQYlXT1ngiG2g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_import, name=py3test] (uid=rnd-bx7nxeqf2si0317k): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ycRZZrSySRso76goAOtbPQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_import', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/api, name=py3test] (uid=rnd-mjb2v7o6mjftcho1): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [im_0wP_gQyog5zvGMydWGQ {'project_path': 'ydb/tests/functional/api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/audit, name=py3test] (uid=rnd-vk2eqgmi5ihs796e): Infrastructure error - contact devtools@ for details. 
Suite build deps: [HL5sUBiPiTSqA9r3Q7NRuA {'project_path': 'ydb/tests/functional/audit', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aLkbkLGwdI2ch9zTXTFP2A {'project_path': 'ydb/apps/dstool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/autoconfig, name=py3test] (uid=rnd-5wgux9xuowjnul9o): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Q51LidrcFfjxEoE6kAEOgA {'project_path': 'ydb/tests/functional/autoconfig', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/blobstorage, name=py3test] (uid=rnd-x73jtoaqb081t7sa): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ0GXCjED_THn5B39AoLlQ {'project_path': 'ydb/tests/functional/blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/canonical, name=py3test] (uid=rnd-pz2fj1rftyclb5rq): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zTlnVzmRDBK0LsKh6K902w {'project_path': 'ydb/tests/functional/canonical', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/clickbench, name=py3test] (uid=rnd-7n9ux7gfaz9mr3e8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [DwsICOZem2jlEmXycxVtCA {'project_path': 'ydb/tests/functional/clickbench', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/cms, name=py3test] (uid=rnd-3vrqtpss2cybol82): Infrastructure error - contact devtools@ for details. Suite build deps: [3X_4aWop_9xlhZeq__pdjA {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/encryption, name=py3test] (uid=rnd-ba37xutl4rjscjbb): Infrastructure error - contact devtools@ for details. Suite build deps: [KHW-X2uVrzIunn8TBs-A5w {'project_path': 'ydb/tests/functional/encryption', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/hive, name=py3test] (uid=rnd-q8ujl5byy5k4kerj): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nD1IDMU1vzAHhreuCwBJAg {'project_path': 'ydb/tests/functional/hive', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/plan2svg, name=py3test] (uid=rnd-fh583c4vrmfrbyku): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [papuI5BL0Gg5HM63eorxBg {'project_path': 'ydb/tests/functional/kqp/plan2svg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/limits, name=py3test] (uid=rnd-xynqa1q6tr84xs75): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQOb6VdWlSHMsI5RWglT4A {'project_path': 'ydb/tests/functional/limits', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/minidumps, name=py3test] (uid=rnd-twzbkqr8is7q3xg2): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sxRcRBfTYgxW-lBThSOYCQ {'project_path': 'ydb/tests/functional/minidumps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/postgresql, name=py3test] (uid=rnd-rf7hx9xo44g6gpts): Infrastructure error - contact devtools@ for details. Suite build deps: [CDj_hpCSXgjkNtlV_ewJIQ {'project_path': 'ydb/tests/functional/postgresql/psql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PLQu-tQpmIHqQm5uWrPXEA {'project_path': 'ydb/tests/functional/postgresql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/query_cache, name=py3test] (uid=rnd-4usyodomnj88u95n): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lK62UiUy8CZG-KLOLoM2GA {'project_path': 'ydb/tests/functional/query_cache', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/rename, name=py3test] (uid=rnd-fkn2nc2daumnjtno): Infrastructure error - contact devtools@ for details. Suite build deps: [Grc41KSRZBDh2daQFkGKSw {'project_path': 'ydb/tests/functional/rename', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/restarts, name=py3test] (uid=rnd-3znb64zgqfq5zip8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_HT3ybXkNdoh5teF9KbA9g {'project_path': 'ydb/tests/functional/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/scheme_shard, name=py3test] (uid=rnd-9md5gzy5b9fu8o8q): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ydPR3mQ44DIJsCCabBpvjw {'project_path': 'ydb/tests/functional/scheme_shard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/scheme_tests, name=py3test] (uid=rnd-x5ehwp428kz3iot0): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v3M7Q5p6SkkasbM9VA5Ujg {'project_path': 'ydb/tests/functional/scheme_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/script_execution, name=py3test] (uid=rnd-0kmzcyrfndp8dq6n): Infrastructure error - contact devtools@ for details. Suite build deps: [Mo_KEEOAnKNdXUva_VuVRA {'project_path': 'ydb/tests/functional/script_execution', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/security, name=py3test] (uid=rnd-2waanrnvhbfewvvm): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZtxfI4fmr-8KroBw7np3RA {'project_path': 'ydb/tests/functional/security', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serializable, name=py3test] (uid=rnd-essz4izil0dy3tir): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MeaX7qsLtlqiOQc0UFJtWw {'project_path': 'ydb/tests/functional/serializable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serverless, name=py3test] (uid=rnd-ldm0dpab6yfv707i): Infrastructure error - contact devtools@ for details. Suite build deps: [9QPZYfBspWRnfPsB-vXB0g {'project_path': 'ydb/tests/functional/serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/cloud, name=py3test] (uid=rnd-vgkaes6xq84tlx7w): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [npRDw4mQghXmQ2PiWTrWcQ {'project_path': 'ydb/tests/functional/sqs/cloud', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/common, name=py3test] (uid=rnd-pzkaamke376eyus8): Infrastructure error - contact devtools@ for details. Suite build deps: [A-0HFt28yTTCtOzjPQ3LpA {'project_path': 'ydb/tests/functional/sqs/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/large, name=py3test] (uid=rnd-y8bc6bw0wuy4dnsm): Infrastructure error - contact devtools@ for details. Suite build deps: [Jb70WXWVJfdxyq-arJJ5dw {'project_path': 'ydb/tests/functional/sqs/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/fifo, name=py3test] (uid=rnd-mfumrvqkxir2eiyl): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [TajYwI71J6fHSMD5gX1rtQ {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/fifo', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/std, name=py3test] (uid=rnd-tft2jetajyqwgssp): Infrastructure error - contact devtools@ for details. Suite build deps: [E6uEnFwRqUA6dIJVnbAUTw {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/std', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/messaging, name=py3test] (uid=rnd-ao9cu8e065y405b9): Infrastructure error - contact devtools@ for details. Suite build deps: [GjA2gBupTIlWkZCJg2DFXg {'project_path': 'ydb/tests/functional/sqs/messaging', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/multinode, name=py3test] (uid=rnd-vxsm9mr6rbboqqno): Infrastructure error - contact devtools@ for details. Suite build deps: [6AzjpakulRNIvAeyOI0x7Q {'project_path': 'ydb/tests/functional/sqs/multinode', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/with_quotas, name=py3test] (uid=rnd-hqgc8k5cwfm7z9la): Infrastructure error - contact devtools@ for details. Suite build deps: [BSeuSZEo6i_o_PHcGBxZBg {'project_path': 'ydb/tests/functional/sqs/with_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/statistics, name=py3test] (uid=rnd-1albt1ha7oaszt2z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Q0fyPzZRKd8AOf3PxF3cMA {'project_path': 'ydb/tests/functional/statistics', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/suite_tests, name=py3test] (uid=rnd-djcadhgz3vght7fy): Infrastructure error - contact devtools@ for details. Suite build deps: [IhzAsKweF8WS35vxF_jCsQ {'project_path': 'ydb/tests/functional/suite_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tenants, name=py3test] (uid=rnd-3ertbbqork1b3r6r): Infrastructure error - contact devtools@ for details. Suite build deps: [MOLU2UhSCl_1hh7wm0gWZw {'project_path': 'ydb/tests/functional/tenants', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium, name=py3test] (uid=rnd-6xos6lyyov7qogim): Infrastructure error - contact devtools@ for details. Suite build deps: [Klgt4e1hSOwrr0Agi_1aIg {'project_path': 'ydb/tests/stress/oltp_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [MWNTb0DJZ3Db0AxQmgdlwA {'project_path': 'ydb/tests/functional/tpc/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c5X-ZXks9Bgum8h-pHYZ0w {'project_path': 'ydb/tests/stress/simple_queue', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium/tpch, name=py3test] (uid=rnd-pcanzt2nh8us21al): Infrastructure error - contact devtools@ for details. 
Suite build deps: [GO36W2XmzR_Kmd20m9v00A {'project_path': 'ydb/tests/functional/tpc/medium/tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Klgt4e1hSOwrr0Agi_1aIg {'project_path': 'ydb/tests/stress/oltp_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c5X-ZXks9Bgum8h-pHYZ0w {'project_path': 'ydb/tests/stress/simple_queue', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpcc, name=py3test] (uid=rnd-t0hx2pyhg9lx35qw): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [U7kcVEsSE24IV79WBrBe5w {'project_path': 'ydb/tests/functional/tpcc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ttl, name=py3test] (uid=rnd-78s0m7mizs86crbf): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hbb7HF1k80L6R3x9eQeB2g {'project_path': 'ydb/tests/functional/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/wardens, name=py3test] (uid=rnd-fta4qsrjhpg8usuz): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ea7gYS5uSNeGI7tDhZkSMA {'project_path': 'ydb/tests/functional/wardens', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=py3test] (uid=rnd-bfafs5dvynod6rnv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [beCT0pUSye70wN6RLyj-Yg {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/library/ut, name=py3test] (uid=rnd-nk1xwu4if0r86l34): Infrastructure error - contact devtools@ for details. Suite build deps: [8_y_hajYWx5OQhm2WxBpbA {'project_path': 'ydb/tests/library/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap, name=py3test] (uid=rnd-l7lfx29kya5nkpl7): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mfPkTABU6NKX9zEJ57znyw {'project_path': 'ydb/tests/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/column_family/compression, name=py3test] (uid=rnd-h7m8f8kc1xqdla8l): Infrastructure error - contact devtools@ for details. Suite build deps: [Kt_15SOv0oJGbrSawHiEcA {'project_path': 'ydb/tests/olap/column_family/compression', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/data_quotas, name=py3test] (uid=rnd-zh1b5qv78dze4zdp): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ea9H0VWTcBFsaNfkLDeR0w {'project_path': 'ydb/tests/olap/data_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/delete, name=py3test] (uid=rnd-q9q1pp3g4i7m3le7): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_2tsQ57TV1xFh7WRY3VE2w {'project_path': 'ydb/tests/olap/delete', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/oom, name=py3test] (uid=rnd-jcewd9henod4qvym): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zVHX800X9O4pqkjL3iTuCA {'project_path': 'ydb/tests/olap/oom', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/olap/s3_import, name=py3test] (uid=rnd-oiwkevnqm01ac7op): Infrastructure error - contact devtools@ for details. Suite build deps: [5E5Zm-obJy6m7FJ5rf4Vkg {'project_path': 'ydb/tests/olap/s3_import', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/scenario, name=py3test] (uid=rnd-2efb7qv9gv3a9cnb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [2g69VCEFyaaNT7khUnGTog {'project_path': 'ydb/tests/olap/scenario', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/ttl_tiering, name=py3test] (uid=rnd-9ruahl27xlqfbg8r): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YO-RvKNsFgUB_Y77oZGqJQ {'project_path': 'ydb/tests/olap/ttl_tiering', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/postgres_integrations/go-libpq, name=py3test] (uid=rnd-tqpcvt4x5kiz5htr): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hfuXKVfGASlcANT199zZHw {'project_path': 'ydb/tests/postgres_integrations/go-libpq', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/solomon/reading, name=py3test] (uid=rnd-o2a94a06dgaxm8tc): Infrastructure error - contact devtools@ for details. Suite build deps: [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RKfch0lrz383HLQVfsNWXA {'project_path': 'ydb/tests/solomon/reading', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/sql, name=py3test] (uid=rnd-nzpwt8mqz83rlf39): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [UymIl6-7mX85mCkUER5Aeg {'project_path': 'ydb/tests/sql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/cdc/tests, name=py3test] (uid=rnd-xuygs3pbva466iu9): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bSBDjvXYkge2gpJqmRMEjQ {'project_path': 'ydb/tests/stress/cdc/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sDclqZdoPP6j9UWkhnAL4Q {'project_path': 'ydb/tests/stress/cdc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/ctas/tests, name=py3test] (uid=rnd-npvzbfz1o4sgohx8): Infrastructure error - contact devtools@ for details. Suite build deps: [AetZSfNZ9CMILmh5FEuoRg {'project_path': 'ydb/tests/stress/ctas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [iFwrj7e8-S-G52jioqHVnQ {'project_path': 'ydb/tests/stress/ctas/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kafka/tests, name=py3test] (uid=rnd-yg1gko5jqurrykcx): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZeFksnG3zpQa68GiwFowhA {'project_path': 'ydb/tests/stress/kafka/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ikbQYbWhERwPwNxWeJc_0A {'project_path': 'ydb/tests/stress/kafka', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kv/tests, name=py3test] (uid=rnd-nx9exem8jn8n9f60): Infrastructure error - contact devtools@ for details. 
Suite build deps: [L5_dPxUZuaQvMaa_YBK2fw {'project_path': 'ydb/tests/stress/kv', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [QMGF-kY3Et0hA9SuM2Fynw {'project_path': 'ydb/tests/stress/kv/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/log/tests, name=py3test] (uid=rnd-hjkg8ub5jakmnbmd): Infrastructure error - contact devtools@ for details. Suite build deps: [3jQfUN1v26zOkMZ69x4FlQ {'project_path': 'ydb/tests/stress/log', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Ab0rMJiyARnZ6JrZAJmheA {'project_path': 'ydb/tests/stress/log/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/mixedpy/tests, name=py3test] (uid=rnd-bx7nf3h10iemzguh): Infrastructure error - contact devtools@ for details. Suite build deps: [5LQ5TI4a4QtTmgfqlZw80w {'project_path': 'ydb/tests/stress/mixedpy/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [KC01rfqy1WlXdkxg82cjaA {'project_path': 'ydb/tests/stress/mixedpy', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/node_broker/tests, name=py3test] (uid=rnd-wcxrbumsiyuifyh6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [buu2lGedrbrv-cg3iduXWw {'project_path': 'ydb/tests/stress/node_broker/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z1AP_QLBfiua1S069grTmg {'project_path': 'ydb/tests/stress/node_broker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/stress/olap_workload/tests, name=py3test] (uid=rnd-e3x88m4s0040zvcp): Infrastructure error - contact devtools@ for details. Suite build deps: [1rzh3WqZWGj_x01E123SqQ {'project_path': 'ydb/tests/stress/olap_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EdEbyk08XI49xwFMo65BKQ {'project_path': 'ydb/tests/stress/olap_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/oltp_workload/tests, name=py3test] (uid=rnd-q39eo9e2ui7jvurs): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [h9wh5qE0vDGZd_cY4cb3Aw {'project_path': 'ydb/tests/stress/oltp_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/reconfig_state_storage_workload/tests, name=py3test] (uid=rnd-rstpaivrgyzt0cfi): Infrastructure error - contact devtools@ for details. Suite build deps: [KlOxmrsPOds-L2Cfw05nLA {'project_path': 'ydb/tests/stress/reconfig_state_storage_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cR5TVmKUFGLl41mv8wRu6A {'project_path': 'ydb/tests/stress/reconfig_state_storage_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/s3_backups/tests, name=py3test] (uid=rnd-2ogh285njroeqekn): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7EZZeYHdmBvGAgnMPGMniQ {'project_path': 'ydb/tests/stress/s3_backups/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u4iQX4gRLubjamX2SB7hkw {'project_path': 'ydb/tests/stress/s3_backups', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/stress/scheme_board/pile_promotion/tests, name=py3test] (uid=rnd-ajl54r4a1owc9eu7): Infrastructure error - contact devtools@ for details. Suite build deps: [-oGy1y7cJvhEvjF6QpbCqA {'project_path': 'ydb/tests/stress/scheme_board/pile_promotion', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [S9IVLNAunqYvZ3GrEK2ieg {'project_path': 'ydb/tests/stress/scheme_board/pile_promotion/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/show_create/view/tests, name=py3test] (uid=rnd-6xo6xb3gidpogmd8): Infrastructure error - contact devtools@ for details. Suite build deps: [5RvnpEFbCIiVFC7gtFnBmw {'project_path': 'ydb/tests/stress/show_create/view/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [an659eRe9hftMpi2acAyKQ {'project_path': 'ydb/tests/stress/show_create/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/simple_queue/tests, name=py3test] (uid=rnd-hniwjqt28p84rziv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [SHtbvTw9gYtsm_YhF2YswA {'project_path': 'ydb/tests/stress/simple_queue/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/topic/tests, name=py3test] (uid=rnd-iaiccarpe40fbr4e): Infrastructure error - contact devtools@ for details. Suite build deps: [-ZehxnvMT1V3FVYdVlICsQ {'project_path': 'ydb/tests/stress/topic/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [8_7HrEMR6eCiWfBbutNH3A {'project_path': 'ydb/tests/stress/topic', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/topic_kafka/tests, name=py3test] (uid=rnd-vr15cko0rpic7kme): Infrastructure error - contact devtools@ for details. Suite build deps: [HDnAnOkjtQVugl94ChIaMg {'project_path': 'ydb/tests/stress/topic_kafka/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [X1ZOPbuqQ9CDONHyxiyk2Q {'project_path': 'ydb/tests/stress/topic_kafka', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/transfer/tests, name=py3test] (uid=rnd-ychwx92fsq5x7m5i): Infrastructure error - contact devtools@ for details. Suite build deps: [6C5Ro_rt_dQsvdgCq6stCg {'project_path': 'ydb/tests/stress/transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [foRukMRbRdQRKIDDDfLmrw {'project_path': 'ydb/tests/stress/transfer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/kqprun/tests, name=py3test] (uid=rnd-ppkp176oexml43lc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [FY97zHmR3ruzzAgzN9uH6A {'project_path': 'ydb/tests/tools/kqprun/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [GB2ieZj8CxlhtaSmSU54uQ {'project_path': 'ydb/tests/tools/kqprun/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/ut, name=py3test] (uid=rnd-axzc7y06o06nc7dz): Infrastructure error - contact devtools@ for details. Suite build deps: [EaVcg6KtmBQwAYVQDNEtNw {'project_path': 'ydb/tests/tools/nemesis/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/pq_read/test, name=py3test] (uid=rnd-4r1fy90mwx95kx2d): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zVb8FYjdwyfXN4FffNjulQ {'project_path': 'ydb/tests/tools/pq_read/test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part0, name=pytest] (uid=rnd-fywi8uh7hvgv7v5k): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WqG84sxUxmwfN5wS1cXmHw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part1, name=pytest] (uid=rnd-k4ir96ochodun0u8): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [l9gz16nAgAHG2cNiw38qPg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part10, name=pytest] (uid=rnd-gv31xx65eowag89z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i9kYz9V2sLtWfb0Q-rekoA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part11, name=pytest] (uid=rnd-svp4e46guj08s3nd): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [RdhZQKNREGuPCIbfJ5uQUA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part12, name=pytest] (uid=rnd-pek5jfvjyt8obua5): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c2xft3o7C_mD78w7clzNSA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part13, name=pytest] (uid=rnd-surmj0ywu9ccpuha): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [FL-Dq1YbRD5DpLwfkeuttw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part14, name=pytest] (uid=rnd-q4netjxzvlk4tevs): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fdhRZqWF-n1ohWd4NSYCtg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part15, name=pytest] (uid=rnd-pzc3bbj6bni7hjj4): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hFlwFQlcKfCdM_JPhRmpig {'project_path': 'ydb/library/yql/tests/sql/dq_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part16, name=pytest] (uid=rnd-7voy0zo9vjop4pdq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_ldtsIYrOYePLnj0xiqG-Q {'project_path': 'ydb/library/yql/tests/sql/dq_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part17, name=pytest] (uid=rnd-48raqqj5grtl07tb): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YEwPpvo7BuR53kwwrUvdzw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part18, name=pytest] (uid=rnd-91g88i7qcq89s3s3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [4SaXQwK3tipRRQR31kr7qw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part19, name=pytest] (uid=rnd-ca1vt0p8vsw53hfk): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eYdOfHnRZW_MsImFH8CDQg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part2, name=pytest] (uid=rnd-n2p69c94pow4mt88): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cd6MrAMNe5G-PW1KAmHDtA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part3, name=pytest] (uid=rnd-nf4upi2ln8nu2fer): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ppJd-cpOQ_HDk4-ftoZ4Uw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part4, name=pytest] (uid=rnd-ose4fl76c4fzbo8q): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VOg41eTnFnquCXOkzk_06A {'project_path': 'ydb/library/yql/tests/sql/dq_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part5, name=pytest] (uid=rnd-4cnx67se7dfulc9m): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [EZdEs3bCupXvK4pDZMdYfQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part6, name=pytest] (uid=rnd-dt1ynckrik0iky40): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fd-eee-ef-8PIV1-rO29Aw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part7, name=pytest] (uid=rnd-ew8fgamcqth8xgdw): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_yE54j4lJCRgctsUJ6yIag {'project_path': 'ydb/library/yql/tests/sql/dq_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part8, name=pytest] (uid=rnd-a6npyttpuirskoiv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [PPRWuiTyzSwbRSgLstunFQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part9, name=pytest] (uid=rnd-79ze7w8jfcyhwcku): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [MkZEOWFrq_Cw0twlYPnATw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part0, name=pytest] (uid=rnd-i0huhxbile4k03t7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [OTTBxZGYz2Xaa7SPvAUPWw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part1, name=pytest] (uid=rnd-54bc3an1taf415nr): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [QQW8zBZT3p3JzbZ5bFPECA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part10, name=pytest] (uid=rnd-sdmzpxdhtpz3nt9d): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [8wQTyN_n0vyPIEPZhM2Ljg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part2, name=pytest] (uid=rnd-g29x5vath783u6k0): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [7FtCiB4Nm4YTgSzvoeMj1g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part3, name=pytest] (uid=rnd-ahjvipvx7fnd33pc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aexmIkZQAE_NsaIb7swZzQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part4, name=pytest] (uid=rnd-7vhxy0z0zq3wmxyz): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [EzJCVI1uLOb07_0AQInQBg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part5, name=pytest] (uid=rnd-x7lht7ph3c424kfc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [TQ0idwAoSn0mI29Ondh_wg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part6, name=pytest] (uid=rnd-52p9d45zdk3r2h2k): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GdUjNVGJJ4A5BQMLhasiEA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part7, name=pytest] (uid=rnd-d3114nlds5vavg60): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [O2SXASj22gKDRWJBF3Z00g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part8, name=pytest] (uid=rnd-q0cv0ahgcfi71azf): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VqodKfnOtHeiBTdD_8iHgQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part9, name=pytest] (uid=rnd-363maqtki0amxddj): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Xrpgg0yhlehTvVFrnQ3AvA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/solomon, name=pytest] (uid=rnd-jlvlbonnvbm452kd): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tiFyM4wcAZ7tz-Yt4dS_Aw {'project_path': 'ydb/library/yql/tests/sql/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/apps/etcd_proxy/service/ut, name=unittest] (uid=rnd-xo1btm05jeamd6y3): Infrastructure error - contact devtools@ for details. Suite build deps: [6n6HCQaxCmjvFz9N2j5jPg {'project_path': 'ydb/apps/etcd_proxy/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/apps/ydb/ut, name=unittest] (uid=rnd-u17ziwjpmj413qjq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wUwnk-hppPHLVOX0-ppFwg {'project_path': 'ydb/apps/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/actorlib_impl/ut, name=unittest] (uid=rnd-kl38z01ytzwjn6lp): Infrastructure error - contact devtools@ for details. Suite build deps: [dNa7edmUw1SSz5SmqYiblQ {'project_path': 'ydb/core/actorlib_impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_local_partition_reader, name=unittest] (uid=rnd-wflvqh8v0b0m1x5k): Infrastructure error - contact devtools@ for details. Suite build deps: [desMz-8AnHIPBZmLNYpg8Q {'project_path': 'ydb/core/backup/impl/ut_local_partition_reader', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_table_writer, name=unittest] (uid=rnd-kr28pjhdciqts6zt): Infrastructure error - contact devtools@ for details. Suite build deps: [gwQ-TSevxSCstvTqosC-ZA {'project_path': 'ydb/core/backup/impl/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/backpressure/ut_client, name=unittest] (uid=rnd-md99zge7qjcqnue4): Infrastructure error - contact devtools@ for details. Suite build deps: [J77MJcmbyM1_iV7l4Ywkqw {'project_path': 'ydb/core/blobstorage/backpressure/ut_client', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut, name=unittest] (uid=rnd-h85t75ebjc5jdwkp): Infrastructure error - contact devtools@ for details. Suite build deps: [lFVv07i6eFByo3CPopRe1w {'project_path': 'ydb/core/blobstorage/dsproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_fat, name=unittest] (uid=rnd-3t933cqnsdoognpt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [_L1T-Skeaa1MsdjMI6JhCA {'project_path': 'ydb/core/blobstorage/dsproxy/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_ftol, name=unittest] (uid=rnd-l51shlxgr6sop4g4): Infrastructure error - contact devtools@ for details. Suite build deps: [GiwuoO1Qq1XvCnh1sd4ZEw {'project_path': 'ydb/core/blobstorage/dsproxy/ut_ftol', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut, name=unittest] (uid=rnd-u94uwj8zvcooxvji): Infrastructure error - contact devtools@ for details. Suite build deps: [Q840sQzpCZgGRB5ANVJREA {'project_path': 'ydb/core/blobstorage/nodewarden/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut_sequence, name=unittest] (uid=rnd-xgq0nlt4plo6bxct): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yrnE9w7Oqja0VWwnaHXSjA {'project_path': 'ydb/core/blobstorage/nodewarden/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/pdisk/ut, name=unittest] (uid=rnd-stc6ag3h86e9gxpd): Infrastructure error - contact devtools@ for details. Suite build deps: [OMvI4sPlzFciW41OUgyoVQ {'project_path': 'ydb/core/blobstorage/pdisk/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/storagepoolmon/ut, name=unittest] (uid=rnd-pgycxeugl5ycfq8b): Infrastructure error - contact devtools@ for details. Suite build deps: [3Io6FZWQ7ygJikfVx7FlQA {'project_path': 'ydb/core/blobstorage/storagepoolmon/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage, name=unittest] (uid=rnd-xqu98yt42268onlo): Infrastructure error - contact devtools@ for details. Suite build deps: [5-pDglIbWbQzXaby7xu_9g {'project_path': 'ydb/core/blobstorage/ut_blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_balancing, name=unittest] (uid=rnd-m63f9wzelgq0t9xb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xB7P38sJ6S8ryPS1RtIpOw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_blob_depot, name=unittest] (uid=rnd-fomnhq7fieybfn8j): Infrastructure error - contact devtools@ for details. Suite build deps: [e0nMVbgrBNJlkIQiBfiFNg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_blob_depot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_bridge, name=unittest] (uid=rnd-6qtyn2ncb6j52qhq): Infrastructure error - contact devtools@ for details. Suite build deps: [3yCnpHVFlLtOBzHyIFvjVw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_bridge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_check_integrity, name=unittest] (uid=rnd-p04j4dyn6w2xk7kr): Infrastructure error - contact devtools@ for details. Suite build deps: [jRsrFtIn8ZctxVCegT8qcw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_check_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing, name=unittest] (uid=rnd-5fp3h4ki7cc9nemz): Infrastructure error - contact devtools@ for details. Suite build deps: [Gdk6rlnYl1v9ELYEbaLdTg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_donor, name=unittest] (uid=rnd-hbxselw7kipjr6af): Infrastructure error - contact devtools@ for details. Suite build deps: [mrURmxo8ElA6mlWtkAQ56g {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_donor', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_huge, name=unittest] (uid=rnd-wuhj4xqskd25ua91): Infrastructure error - contact devtools@ for details. Suite build deps: [2y7CBFYlcs4k0mlZ1n3BEw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_huge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk, name=unittest] (uid=rnd-w78sehu5mzok32jf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4gO7d3FPQ5Ce2SwR0UsKrA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_statestorage, name=unittest] (uid=rnd-vdfb1xsk5x8lodc8): Infrastructure error - contact devtools@ for details. Suite build deps: [Bw49BkMTbOWRxpVoCTi6kg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_statestorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk, name=unittest] (uid=rnd-63k9q194ak3wqltw): Infrastructure error - contact devtools@ for details. Suite build deps: [7syP3bbSPbLGdbjOgE_I7Q {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart, name=unittest] (uid=rnd-4fd5fifpmz5v6o29): Infrastructure error - contact devtools@ for details. Suite build deps: [krVoK3UZ96KD2LCqqWbCnA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_mirror3of4, name=unittest] (uid=rnd-i5lml2wsnz4dvnr4): Infrastructure error - contact devtools@ for details. Suite build deps: [VohY1r2GQeTHf3HjJ3qVBw {'project_path': 'ydb/core/blobstorage/ut_mirror3of4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_testshard, name=unittest] (uid=rnd-lvq9qbeqliew5iun): Infrastructure error - contact devtools@ for details. Suite build deps: [cqamkaaw53ZT1_ik6HQ4oA {'project_path': 'ydb/core/blobstorage/ut_testshard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk, name=unittest] (uid=rnd-fvbqrfhx51sd8j6d): Infrastructure error - contact devtools@ for details. Suite build deps: [YwSTwtKTQ5qcEZ6R0Nll3Q {'project_path': 'ydb/core/blobstorage/ut_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk2, name=unittest] (uid=rnd-h1aogzzp3gzrljed): Infrastructure error - contact devtools@ for details. 
Suite build deps: [lGZpaJdCQ8UGpOANZcu-Gg {'project_path': 'ydb/core/blobstorage/ut_vdisk2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/skeleton/ut, name=unittest] (uid=rnd-mnwptfxnuq3o501p): Infrastructure error - contact devtools@ for details. Suite build deps: [3x-uuaD47atpDMU36TNgbg {'project_path': 'ydb/core/blobstorage/vdisk/skeleton/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/synclog/ut, name=unittest] (uid=rnd-fef5qjcqprjk7o5c): Infrastructure error - contact devtools@ for details. Suite build deps: [WazADWxQ-7A-noYokaagpg {'project_path': 'ydb/core/blobstorage/vdisk/synclog/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/minikql_compile/ut, name=unittest] (uid=rnd-2qq9agi9iviggylk): Infrastructure error - contact devtools@ for details. Suite build deps: [oyDDPFxYkLUmoG4gqndHlA {'project_path': 'ydb/core/client/minikql_compile/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/ut, name=unittest] (uid=rnd-fzm6qe4vowmt0ffm): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [6RslIKeiyKcgbTHK0HO1ZQ {'project_path': 'ydb/core/client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/ut, name=unittest] (uid=rnd-cuieqbc9minmcj9c): Infrastructure error - contact devtools@ for details. Suite build deps: [di8kWGkoWGzV3K3nVkLr8g {'project_path': 'ydb/core/cms/console/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut, name=unittest] (uid=rnd-n03uatttr5euo6vz): Infrastructure error - contact devtools@ for details. Suite build deps: [ITbF__cK-Q9D_owLCqZQdg {'project_path': 'ydb/core/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel, name=unittest] (uid=rnd-oc2jx0wwztfjryw6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [pApuhxdMH_72Q4IDCAKdQw {'project_path': 'ydb/core/cms/ut_sentinel', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel_unstable, name=unittest] (uid=rnd-qj8e973luhdwp51v): Infrastructure error - contact devtools@ for details. Suite build deps: [RF1zYngDSGRep8wqK3Hw3w {'project_path': 'ydb/core/cms/ut_sentinel_unstable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/control/ut, name=unittest] (uid=rnd-jtiwl8bi6qap3mkc): Infrastructure error - contact devtools@ for details. Suite build deps: [UYk3Ft8qJuQ7_G7JZ6mwxA {'project_path': 'ydb/core/control/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/engine/ut, name=unittest] (uid=rnd-b6e3ajxsoy3vk483): Infrastructure error - contact devtools@ for details. Suite build deps: [lKt0z0dM8jPrc2XymlYifQ {'project_path': 'ydb/core/engine/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/external_sources/s3/ut, name=unittest] (uid=rnd-vf8fbf62uyuv5wvi): Infrastructure error - contact devtools@ for details. Suite build deps: [4qEj448_Wx_nXqrxCENx8g {'project_path': 'ydb/core/external_sources/s3/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [HCBUIDcacx4RRnxJZbe-BA {'project_path': 'library/recipes/docker_compose', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yKApAgkaXzebFsu668Rswg {'project_path': 'library/recipes/docker_compose/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/fq/libs/checkpoint_storage/ut, name=unittest] (uid=rnd-oq5nb6cbtjxa60t1): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pFtT7DoruicHEF9LOEgMVg {'project_path': 'ydb/core/fq/libs/checkpoint_storage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpointing/ut, name=unittest] (uid=rnd-8lo0tnue81y36ki9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qzY3lI-LZg94L4BOlP93Bg {'project_path': 'ydb/core/fq/libs/checkpointing/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/common/ut, name=unittest] (uid=rnd-lykj3xsju52zkp2s): Infrastructure error - contact devtools@ for details. Suite build deps: [3BkOtDWXn0hQLQJJ8QoIrA {'project_path': 'ydb/core/fq/libs/common/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/control_plane_proxy/ut, name=unittest] (uid=rnd-r3m39lf3quismpn1): Infrastructure error - contact devtools@ for details. Suite build deps: [KLUdE-_QHSg641dOXSGHLg {'project_path': 'ydb/core/fq/libs/control_plane_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/result_formatter/ut, name=unittest] (uid=rnd-pllrp684bobwgoka): Infrastructure error - contact devtools@ for details. Suite build deps: [6SEW03t25bzw5YblbAjTLw {'project_path': 'ydb/core/fq/libs/result_formatter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/format_handler/ut, name=unittest] (uid=rnd-8xr58dwko40t5aeg): Infrastructure error - contact devtools@ for details. Suite build deps: [c2bH_8NXmAtG264K6PckXA {'project_path': 'ydb/core/fq/libs/row_dispatcher/format_handler/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/ut, name=unittest] (uid=rnd-aatv1y9n4onzjumx): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tO1R3p3N1yDJIPlNsZFUkg {'project_path': 'ydb/core/fq/libs/row_dispatcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/test_connection/ut, name=unittest] (uid=rnd-mmoqs5e4r3jg5stt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bX0OYY_FUoZdfAEu1gf7uA {'project_path': 'ydb/core/fq/libs/test_connection/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/ydb/ut, name=unittest] (uid=rnd-95pnswnfr9i1sy83): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nr-JoX2NmhNJBSQEo0j7AQ {'project_path': 'ydb/core/fq/libs/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/shard/ut, name=unittest] (uid=rnd-bunfzjkbf8ck2rro): Infrastructure error - contact devtools@ for details. Suite build deps: [R4zFh6NIYgzFJNMYxGcBVg {'project_path': 'ydb/core/graph/shard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/ut, name=unittest] (uid=rnd-j1cqozg5wpt8onex): Infrastructure error - contact devtools@ for details. Suite build deps: [Y_J2ejkBz-kxhlgPmQokZg {'project_path': 'ydb/core/graph/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/grpc_request_check_actor_ut, name=unittest] (uid=rnd-uen6edlheskgx2yw): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xOHq4GF2TdQymHT1yqiNCg {'project_path': 'ydb/core/grpc_services/grpc_request_check_actor_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/grpc_services/tablet/ut, name=unittest] (uid=rnd-wrmex9ioa7rzo7hy): Infrastructure error - contact devtools@ for details. Suite build deps: [T1h0RX--1i5b6gHZDUQ2-g {'project_path': 'ydb/core/grpc_services/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/ut, name=unittest] (uid=rnd-j10j6fm4k7ttdrof): Infrastructure error - contact devtools@ for details. 
Suite build deps: [kO3GS5K09mVxOzJllfZn7Q {'project_path': 'ydb/core/grpc_services/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_streaming/ut, name=unittest] (uid=rnd-6ceduyzc7rq1uslj): Infrastructure error - contact devtools@ for details. Suite build deps: [DTDar6h-iSSbtnu89OsO7w {'project_path': 'ydb/core/grpc_streaming/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/health_check/ut, name=unittest] (uid=rnd-7o8y41g4qh30gyps): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vhmq966BMUwTUKkpG3fRFQ {'project_path': 'ydb/core/health_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/http_proxy/ut, name=unittest] (uid=rnd-zh8cfzdvo942ah7b): Infrastructure error - contact devtools@ for details. Suite build deps: [SeoZhL_RMyaC_cpK4FjCdA {'project_path': 'ydb/core/http_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kafka_proxy/ut, name=unittest] (uid=rnd-kmdoj84wlzh2ipte): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tpoVF2M2g3f86xtfHgvEDA {'project_path': 'ydb/core/kafka_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kesus/proxy/ut, name=unittest] (uid=rnd-dey6a4bm64awvrd9): Infrastructure error - contact devtools@ for details. Suite build deps: [Ap_T5I8ahA9ejABnnuiXtw {'project_path': 'ydb/core/kesus/proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kesus/tablet/ut, name=unittest] (uid=rnd-23d4i5dwvt3vf9iz): Infrastructure error - contact devtools@ for details. Suite build deps: [IXZuYuD7QsuseNIWyENiDg {'project_path': 'ydb/core/kesus/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut, name=unittest] (uid=rnd-d7ny8h3gvxo0cwhv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dxkbTJc5KEqtjkvhAvkPlQ {'project_path': 'ydb/core/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut_trace, name=unittest] (uid=rnd-fycqt7ftegtcxoz1): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [y5vitdGOmUvA9qEU0GAFUQ {'project_path': 'ydb/core/keyvalue/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/executer_actor/ut, name=unittest] (uid=rnd-mttanbxog2btje01): Infrastructure error - contact devtools@ for details. Suite build deps: [csvXMMmx2LljrfT5BCKHgQ {'project_path': 'ydb/core/kqp/executer_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/federated_query/ut_service, name=unittest] (uid=rnd-3hvugl83a91atdao): Infrastructure error - contact devtools@ for details. Suite build deps: [XJjHl15zWm3n_REMsCQfzg {'project_path': 'ydb/core/kqp/federated_query/ut_service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/provider/ut, name=unittest] (uid=rnd-o8v5h5wj3iuif1hk): Infrastructure error - contact devtools@ for details. Suite build deps: [TeQBfEfPcDvTTtvXzXJ4Cw {'project_path': 'ydb/core/kqp/provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/rm_service/ut, name=unittest] (uid=rnd-3svak9ryit2t1gg8): Infrastructure error - contact devtools@ for details. Suite build deps: [Pcj3FeLZMCfq6X4fj9B5LA {'project_path': 'ydb/core/kqp/rm_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/runtime/ut, name=unittest] (uid=rnd-oppvqdp501a9p0rx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z_c_z6AlCEY8jWQ7oAqUJw {'project_path': 'ydb/core/kqp/runtime/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/tests/kikimr_tpch, name=unittest] (uid=rnd-z0uajrkefq1apvxt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [5NO0BtQNkc4BqHeFbFV7EA {'project_path': 'yql/essentials/udfs/common/pire', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [I4e-bB8_UrdaYRaEXkVcow {'project_path': 'yql/essentials/udfs/common/string', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [MZ4m6v9tSorHZ9Xg-PtqGw {'project_path': 'yql/essentials/udfs/common/datetime2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Rn5vf2entABGIoBD0m-QuQ {'project_path': 'ydb/core/kqp/tests/kikimr_tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_T3UlkRa1h0lykmUS4n4dg {'project_path': 'yql/essentials/udfs/common/re2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xnyDUDZe-hlMDeCcyiyt7w {'project_path': 'ydb/library/yql/udfs/common/datetime', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/core/kqp/ut/arrow, name=unittest] (uid=rnd-05bn5jy0imhyn1n6): Infrastructure error - contact devtools@ for details. Suite build deps: [IKkX2o8hZ2d_1qwlT4ktkA {'project_path': 'ydb/core/kqp/ut/arrow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/batch_operations, name=unittest] (uid=rnd-9rfvdhtxz95f8t8q): Infrastructure error - contact devtools@ for details. Suite build deps: [Lfc5mIisfgnbw3AqFP4l3A {'project_path': 'ydb/core/kqp/ut/batch_operations', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/cost, name=unittest] (uid=rnd-xwusrcl0lokzsseu): Infrastructure error - contact devtools@ for details. Suite build deps: [KyDMEwhuWXlVrzKPg7EvTg {'project_path': 'ydb/core/kqp/ut/cost', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data, name=unittest] (uid=rnd-b1cc1ejczggxnax7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MCvjPHdUmiDt4VSEtJVCwA {'project_path': 'ydb/core/kqp/ut/data', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data_integrity, name=unittest] (uid=rnd-orl0w6dnzt9chaeo): Infrastructure error - contact devtools@ for details. Suite build deps: [9SW9xO3KBk51B8gR-r3pkA {'project_path': 'ydb/core/kqp/ut/data_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/discovery, name=unittest] (uid=rnd-ql3df0y98ndcgdgo): Infrastructure error - contact devtools@ for details. Suite build deps: [W5Nujkm_R7jeIz9iKT5dAg {'project_path': 'ydb/core/kqp/ut/discovery', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/effects, name=unittest] (uid=rnd-10i98dyrve11y1ir): Infrastructure error - contact devtools@ for details. Suite build deps: [Ro4meXyAzLAsDfinpqDt5w {'project_path': 'ydb/core/kqp/ut/effects', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/datastreams, name=unittest] (uid=rnd-hvhlu26mlvetcgms): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cRVW0jkMW0FoEUHCpmh6Iw {'project_path': 'ydb/core/kqp/ut/federated_query/datastreams', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/generic_ut, name=unittest] (uid=rnd-618dueq9qkc4wcrb): Infrastructure error - contact devtools@ for details. Suite build deps: [__QaTk8sigBpQSYjJrh3Ng {'project_path': 'ydb/core/kqp/ut/federated_query/generic_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/large_results, name=unittest] (uid=rnd-8373w0dh2br1yxh2): Infrastructure error - contact devtools@ for details. 
Suite build deps: [KZBm5jzKkLpB1A4yEYbYXg {'project_path': 'ydb/core/kqp/ut/federated_query/large_results', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/s3, name=unittest] (uid=rnd-xoyfnxok72spyrkq): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nJUEZrWFslD--Qygd6a_ig {'project_path': 'ydb/core/kqp/ut/federated_query/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/idx_test, name=unittest] (uid=rnd-wm97tkmxga8zzai3): Infrastructure error - contact devtools@ for details. Suite build deps: [W7lR72m4DBoLBDUra6BcHQ {'project_path': 'ydb/core/kqp/ut/idx_test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/indexes, name=unittest] (uid=rnd-8a89axo2qomyzc7j): Infrastructure error - contact devtools@ for details. Suite build deps: [DNWlXeWkxrzQir4YqL5_MQ {'project_path': 'ydb/core/kqp/ut/indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/join, name=unittest] (uid=rnd-klkszh3y9xoku9gt): Infrastructure error - contact devtools@ for details. Suite build deps: [7FyG1Fa1x1ZSlEl8gK6PdA {'project_path': 'ydb/core/kqp/ut/join', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/olap, name=unittest] (uid=rnd-83zsgzfgroar0eq5): Infrastructure error - contact devtools@ for details. Suite build deps: [KUMLBflF1XT47QyeCEOaTQ {'project_path': 'ydb/core/kqp/ut/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/opt, name=unittest] (uid=rnd-d0of3rkd2v72qnfq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dr8BJDS5QzDtCr3vyHo2JQ {'project_path': 'ydb/core/kqp/ut/opt', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/perf, name=unittest] (uid=rnd-k4jlbmfxuuatezf6): Infrastructure error - contact devtools@ for details. Suite build deps: [rxivR5lKAznC3jKEcjIl0Q {'project_path': 'ydb/core/kqp/ut/perf', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/pg, name=unittest] (uid=rnd-hy6i43idcyqa52xs): Infrastructure error - contact devtools@ for details. Suite build deps: [-cX5SVbot8hB1b66u76gXQ {'project_path': 'ydb/core/kqp/ut/pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/query, name=unittest] (uid=rnd-vlyf44rxi057u22t): Infrastructure error - contact devtools@ for details. Suite build deps: [TASBH2-uA4ZTYV-Q2wQmbg {'project_path': 'ydb/core/kqp/ut/query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/runtime, name=unittest] (uid=rnd-lsknt69yl9e0utvc): Infrastructure error - contact devtools@ for details. Suite build deps: [73N7OgVpg_v-vQE6eXrWaA {'project_path': 'ydb/core/kqp/ut/runtime', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scan, name=unittest] (uid=rnd-l6deh0rli1egczzb): Infrastructure error - contact devtools@ for details. Suite build deps: [nzNVRofrXkamgK590oNRrw {'project_path': 'ydb/core/kqp/ut/scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scheme, name=unittest] (uid=rnd-nmk0hulwgz2vfioq): Infrastructure error - contact devtools@ for details. Suite build deps: [hlIdGWt8kJBPY2PQJCqG7Q {'project_path': 'ydb/core/kqp/ut/scheme', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/service, name=unittest] (uid=rnd-oyskp8zhp7e4hsjo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [DP6sXHWF5DiSjlwIq2dr8Q {'project_path': 'ydb/core/kqp/ut/service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/sysview, name=unittest] (uid=rnd-1i2lwlz5m1xv8tjq): Infrastructure error - contact devtools@ for details. Suite build deps: [hgrO8CNpZ4VyHJR_q8sXdw {'project_path': 'ydb/core/kqp/ut/sysview', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/tx, name=unittest] (uid=rnd-5k122ob5euyoyky1): Infrastructure error - contact devtools@ for details. Suite build deps: [MpKC41R15ISDPv5h51ZY0A {'project_path': 'ydb/core/kqp/ut/tx', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/view, name=unittest] (uid=rnd-ybiwkwysqaq2illr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vyn8KJsG23lYprz_jHl5nw {'project_path': 'ydb/core/kqp/ut/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/yql, name=unittest] (uid=rnd-vqhoxbgkrrnet0z7): Infrastructure error - contact devtools@ for details. Suite build deps: [nN7NU90M2EY8qlfUhiHLVw {'project_path': 'ydb/core/kqp/ut/yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/workload_service/ut, name=unittest] (uid=rnd-umix4ni7jpa6omot): Infrastructure error - contact devtools@ for details. Suite build deps: [96xGlMzbi8YK819ldDiC8g {'project_path': 'ydb/core/kqp/workload_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/load_test/ut, name=unittest] (uid=rnd-1jnav3uv6yt9axq5): Infrastructure error - contact devtools@ for details. Suite build deps: [ETNiFapPwne4jlnXm_kzYg {'project_path': 'ydb/core/load_test/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/memory_controller/ut, name=unittest] (uid=rnd-h1tqe8c5gway6dve): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7PeKWQPC9OEWNe7-gtML9Q {'project_path': 'ydb/core/memory_controller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/address_classification/ut, name=unittest] (uid=rnd-wstmmkt4k4tmq0vh): Infrastructure error - contact devtools@ for details. Suite build deps: [P85L5nZFnkC3WnicjJR42A {'project_path': 'ydb/core/mind/address_classification/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut, name=unittest] (uid=rnd-lyrmhlajdreh7kw4): Infrastructure error - contact devtools@ for details. Suite build deps: [CJ53u2dKW-njZV5g5NrW4A {'project_path': 'ydb/core/mind/bscontroller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut_bscontroller, name=unittest] (uid=rnd-5i721n71q492258g): Infrastructure error - contact devtools@ for details. Suite build deps: [aEtZsMr2DhUluPTLsaCO-Q {'project_path': 'ydb/core/mind/bscontroller/ut_bscontroller', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut_selfheal, name=unittest] (uid=rnd-97j3v461971p8u4u): Infrastructure error - contact devtools@ for details. Suite build deps: [8HVjDd453VcGXjBcqvwo5w {'project_path': 'ydb/core/mind/bscontroller/ut_selfheal', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/hive/ut, name=unittest] (uid=rnd-6mlfj96y7wbxhxt6): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xFfcW_FEN7QAi8B89g9mbQ {'project_path': 'ydb/core/mind/hive/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/ut, name=unittest] (uid=rnd-lwi2exqbowwtexd0): Infrastructure error - contact devtools@ for details. Suite build deps: [EpDqvKdKqx1fi_gWm3_ajQ {'project_path': 'ydb/core/mind/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut_fat, name=unittest] (uid=rnd-4uhjfiv5mcwvqvop): Infrastructure error - contact devtools@ for details. 
Suite build deps: [j34JPDFlXbGA-ummBhxlqw {'project_path': 'ydb/core/mind/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mon/ut, name=unittest] (uid=rnd-gh2japdrgb38n61p): Infrastructure error - contact devtools@ for details. Suite build deps: [qi-RkrhUQTyk4ZDqyt0AeA {'project_path': 'ydb/core/mon/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/dread_cache_service/ut, name=unittest] (uid=rnd-xztk48qstmuttnex): Infrastructure error - contact devtools@ for details. Suite build deps: [mANhjC7zg18RtQDo6D8_FQ {'project_path': 'ydb/core/persqueue/dread_cache_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/pqtablet/cache/ut, name=unittest] (uid=rnd-0l46qqowk35917j1): Infrastructure error - contact devtools@ for details. Suite build deps: [m_Hm0Dv1_EOFV6p4TKV8kw {'project_path': 'ydb/core/persqueue/pqtablet/cache/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/public/fetcher/ut, name=unittest] (uid=rnd-lm6g2a4sbf3xz5nw): Infrastructure error - contact devtools@ for details. Suite build deps: [7gGqHdHgd6slRciTuz2h1w {'project_path': 'ydb/core/persqueue/public/fetcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/public/list_topics/ut, name=unittest] (uid=rnd-14h7ksp4lk8srsvy): Infrastructure error - contact devtools@ for details. Suite build deps: [MNKFhSKYk2e25o1dYTZiNA {'project_path': 'ydb/core/persqueue/public/list_topics/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut, name=unittest] (uid=rnd-8nt65n53ong4c30k): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z_6AV9mb2pXBX4Jxd4YISA {'project_path': 'ydb/core/persqueue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/persqueue/ut/slow, name=unittest] (uid=rnd-g1ftlemb4s16e9md): Infrastructure error - contact devtools@ for details. 
Suite build deps: [QEQjz6HplUp7LifG7NZChA {'project_path': 'ydb/core/persqueue/ut/slow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/ut_with_sdk, name=unittest] (uid=rnd-6lx5pn06ayokysca): Infrastructure error - contact devtools@ for details. Suite build deps: [kB1NyoWIQCymiRdk4hJpdQ {'project_path': 'ydb/core/persqueue/ut/ut_with_sdk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/public_http/ut, name=unittest] (uid=rnd-nssp53wcb2t6dwxr): Infrastructure error - contact devtools@ for details. Suite build deps: [_8aCNhNK3HdHPFhbJvsLDg {'project_path': 'ydb/core/public_http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/quoter/ut, name=unittest] (uid=rnd-09257ci3w3f4wp3g): Infrastructure error - contact devtools@ for details. Suite build deps: [Jp9DzpYxX8vW0bzAhiwdoA {'project_path': 'ydb/core/quoter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/raw_socket/ut, name=unittest] (uid=rnd-5fe1bm3058cqxnsl): Infrastructure error - contact devtools@ for details. Suite build deps: [B0KWrlg1KZZXxsHix7fUrQ {'project_path': 'ydb/core/raw_socket/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/certificate_check/ut, name=unittest] (uid=rnd-x4wjrivfrbq96ckh): Infrastructure error - contact devtools@ for details. Suite build deps: [kIpGmYnKNb7hVnjb3ELmlQ {'project_path': 'ydb/core/security/certificate_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ldap_auth_provider/ut, name=unittest] (uid=rnd-s1fipg7p4dmpil84): Infrastructure error - contact devtools@ for details. Suite build deps: [6Bxu96fIKin_AOD6Sn8dfw {'project_path': 'ydb/core/security/ldap_auth_provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ut, name=unittest] (uid=rnd-gkoevtgv4kn5r4ht): Infrastructure error - contact devtools@ for details. 
Suite build deps: [TuvJgrnqKiwzAnv0pUjkmw {'project_path': 'ydb/core/security/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/aggregator/ut, name=unittest] (uid=rnd-pz9sfa4t1ek4624s): Infrastructure error - contact devtools@ for details. Suite build deps: [aUx59MNeDfM-sxClwcguIg {'project_path': 'ydb/core/statistics/aggregator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/database/ut, name=unittest] (uid=rnd-5v0rb6uiybei0wef): Infrastructure error - contact devtools@ for details. Suite build deps: [gERE8Fxnk80bAHc0z3nToA {'project_path': 'ydb/core/statistics/database/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut, name=unittest] (uid=rnd-ifw4vu32plavu45o): Infrastructure error - contact devtools@ for details. Suite build deps: [9_5NtWFQDgxb8qKKxcjyog {'project_path': 'ydb/core/statistics/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut/ut_aggregation, name=unittest] (uid=rnd-8l89ofa5i8a27e8f): Infrastructure error - contact devtools@ for details. Suite build deps: [MjoXtqOt3G1Z6vXTNBiLjA {'project_path': 'ydb/core/statistics/service/ut/ut_aggregation', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/query_stats/ut, name=unittest] (uid=rnd-o6l2acolllpv8p84): Infrastructure error - contact devtools@ for details. Suite build deps: [p1JeLHqG3cXcv-sTC6UPOw {'project_path': 'ydb/core/sys_view/query_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/ut, name=unittest] (uid=rnd-06kjj2wan7ir1bj2): Infrastructure error - contact devtools@ for details. Suite build deps: [FVK0KkH4pGzxN4IYYtG02Q {'project_path': 'ydb/core/sys_view/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet/ut, name=unittest] (uid=rnd-ubi8hcngo30lqe5a): Infrastructure error - contact devtools@ for details. 
Suite build deps: [B4xuLxs7qiYBZeFCnAEAGg {'project_path': 'ydb/core/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut, name=unittest] (uid=rnd-vnep7l96782d2fbn): Infrastructure error - contact devtools@ for details. Suite build deps: [PGjj9WvCtJc5gN44Znq6Dw {'project_path': 'ydb/core/tablet_flat/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/column_table, name=unittest] (uid=rnd-ftxdo6m9d70956t2): Infrastructure error - contact devtools@ for details. Suite build deps: [87tS7jLCId2ZuojuhFNN_w {'project_path': 'ydb/core/transfer/ut/column_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/functional, name=unittest] (uid=rnd-mk5axt829idhtdlq): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [piOZAKrjmzU8k9m-T_felg {'project_path': 'ydb/core/transfer/ut/functional', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/row_table, name=unittest] (uid=rnd-lxomfvlsl50v5ac6): Infrastructure error - contact devtools@ for details. Suite build deps: [8evqUqGXk9s5rUT5DJyDcA {'project_path': 'ydb/core/transfer/ut/row_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/balance_coverage/ut, name=unittest] (uid=rnd-b56opbz269gypg6o): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MaA6RTMgFHJKTmfeB8qC5Q {'project_path': 'ydb/core/tx/balance_coverage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/engines/ut, name=unittest] (uid=rnd-5vqvytqvvm5mkkhj): Infrastructure error - contact devtools@ for details. Suite build deps: [srLwByIyo1tlCEFKxY6dIw {'project_path': 'ydb/core/tx/columnshard/engines/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/splitter/ut, name=unittest] (uid=rnd-kvxugkr0e29ayv77): Infrastructure error - contact devtools@ for details. Suite build deps: [nBg-gfBR3xa4OU27vSxRHA {'project_path': 'ydb/core/tx/columnshard/splitter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_rw, name=unittest] (uid=rnd-w18utleurum7r568): Infrastructure error - contact devtools@ for details. Suite build deps: [eTZlzl1KXaRWx2RTY0jxvw {'project_path': 'ydb/core/tx/columnshard/ut_rw', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_schema, name=unittest] (uid=rnd-8w4ykkv94p5clq6s): Infrastructure error - contact devtools@ for details. Suite build deps: [Djl7HckfnzTIyQ4wgpR0Hw {'project_path': 'ydb/core/tx/columnshard/ut_schema', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/conveyor_composite/ut, name=unittest] (uid=rnd-ou4rjud23k1lgau1): Infrastructure error - contact devtools@ for details. Suite build deps: [HBjC7hgwf1ii9zG74MTEgA {'project_path': 'ydb/core/tx/conveyor_composite/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/coordinator/ut, name=unittest] (uid=rnd-0h9fwmiunt2p28uz): Infrastructure error - contact devtools@ for details. Suite build deps: [bynkxlXuTjOXWAR-mjiQkw {'project_path': 'ydb/core/tx/coordinator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/build_index/ut, name=unittest] (uid=rnd-6bsdkhsk1f90ngq4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [gXa7sUPPZEEJz6Dz3Akr3g {'project_path': 'ydb/core/tx/datashard/build_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_background_compaction, name=unittest] (uid=rnd-lt1mwbvb7wt8zm9x): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xYAaH6f7gXKiU_6_3fMlGQ {'project_path': 'ydb/core/tx/datashard/ut_background_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_change_exchange, name=unittest] (uid=rnd-jb7c9bon5gbgb9f9): Infrastructure error - contact devtools@ for details. Suite build deps: [UJ9VlZkJDlX3ckjmzYqpSw {'project_path': 'ydb/core/tx/datashard/ut_change_exchange', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_column_stats, name=unittest] (uid=rnd-m6t1b4iluhnxbwja): Infrastructure error - contact devtools@ for details. Suite build deps: [cdbPp_6trwWql4Umbv689g {'project_path': 'ydb/core/tx/datashard/ut_column_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_compaction, name=unittest] (uid=rnd-geawyyu4u7isy84f): Infrastructure error - contact devtools@ for details. Suite build deps: [U3bVwkbjL1xsFm2k-YbY4g {'project_path': 'ydb/core/tx/datashard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_erase_rows, name=unittest] (uid=rnd-qjioxdizi197x1c1): Infrastructure error - contact devtools@ for details. Suite build deps: [NG2sxu1TjH1e8J_jnm6__g {'project_path': 'ydb/core/tx/datashard/ut_erase_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_export, name=unittest] (uid=rnd-bg71vud94qcog44s): Infrastructure error - contact devtools@ for details. Suite build deps: [C7cmyMHukE138jh5D8EeLQ {'project_path': 'ydb/core/tx/datashard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_followers, name=unittest] (uid=rnd-qkrhm2tqql7fby0g): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WRgNHyNITXkysSU-wLC2eg {'project_path': 'ydb/core/tx/datashard/ut_followers', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_backup, name=unittest] (uid=rnd-ldyctpztnlze7apg): Infrastructure error - contact devtools@ for details. Suite build deps: [6IVXir115R3jO0iVFt_ihg {'project_path': 'ydb/core/tx/datashard/ut_incremental_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_restore_scan, name=unittest] (uid=rnd-wy5k8drdteczm014): Infrastructure error - contact devtools@ for details. Suite build deps: [1Pz-5UDD3ELu6R53Gcz_1A {'project_path': 'ydb/core/tx/datashard/ut_incremental_restore_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_init, name=unittest] (uid=rnd-difbverwbrdbrefe): Infrastructure error - contact devtools@ for details. Suite build deps: [e7aq4hIBH7ePjPmgZr1QrQ {'project_path': 'ydb/core/tx/datashard/ut_init', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_keys, name=unittest] (uid=rnd-hf6fdjledzv3vm5o): Infrastructure error - contact devtools@ for details. Suite build deps: [0FKR21mujbDKCRHSp5-TcA {'project_path': 'ydb/core/tx/datashard/ut_keys', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp, name=unittest] (uid=rnd-vhuu7rewfy56348p): Infrastructure error - contact devtools@ for details. Suite build deps: [M7rThwBjNpnkn_KbNFp24A {'project_path': 'ydb/core/tx/datashard/ut_kqp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_errors, name=unittest] (uid=rnd-1vwv1r1qm0cqs3qk): Infrastructure error - contact devtools@ for details. Suite build deps: [ijoS0f3EJrAPrzZ9R4G23w {'project_path': 'ydb/core/tx/datashard/ut_kqp_errors', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_locks, name=unittest] (uid=rnd-8quld36b9gxbrem6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [35QMsy-GoYXZgWWY2-wHPg {'project_path': 'ydb/core/tx/datashard/ut_locks', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minikql, name=unittest] (uid=rnd-g0mbii5nt22anzhs): Infrastructure error - contact devtools@ for details. Suite build deps: [ixlg1GmcT6k-y6YprJnyLg {'project_path': 'ydb/core/tx/datashard/ut_minikql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_object_storage_listing, name=unittest] (uid=rnd-1djlw1rsfnf9o91l): Infrastructure error - contact devtools@ for details. Suite build deps: [Jz1HRHSTn2bc8zLX-c-mHA {'project_path': 'ydb/core/tx/datashard/ut_object_storage_listing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_order, name=unittest] (uid=rnd-c0on0t24atxf5ndz): Infrastructure error - contact devtools@ for details. Suite build deps: [kuWm3vQUFRT1UMp_e0sCww {'project_path': 'ydb/core/tx/datashard/ut_order', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_range_ops, name=unittest] (uid=rnd-znhdo5t56py5tf7t): Infrastructure error - contact devtools@ for details. Suite build deps: [DNPSOuckYrnNARJjQWt8QA {'project_path': 'ydb/core/tx/datashard/ut_range_ops', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_iterator, name=unittest] (uid=rnd-8mna3u0lgz7m57ez): Infrastructure error - contact devtools@ for details. Suite build deps: [SeTD-t_wzG2b3v46Ax9ukg {'project_path': 'ydb/core/tx/datashard/ut_read_iterator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_table, name=unittest] (uid=rnd-yvhuu97ukkhfifwr): Infrastructure error - contact devtools@ for details. Suite build deps: [2oBFx6XKak6KmBgJQBT2YA {'project_path': 'ydb/core/tx/datashard/ut_read_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_reassign, name=unittest] (uid=rnd-t05e190cz6tlovf8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [IKKpxuXYp3M3ybrJ65N4mg {'project_path': 'ydb/core/tx/datashard/ut_reassign', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_replication, name=unittest] (uid=rnd-gvbhbedad50iem82): Infrastructure error - contact devtools@ for details. Suite build deps: [6eEcXLQYoIJ0z-GIFrwKzw {'project_path': 'ydb/core/tx/datashard/ut_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_rs, name=unittest] (uid=rnd-kpvfwjp5v2igq4kk): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wz0eRBozGHwQhbKs65Rc7g {'project_path': 'ydb/core/tx/datashard/ut_rs', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_sequence, name=unittest] (uid=rnd-ecpvlvhh1mrdorzy): Infrastructure error - contact devtools@ for details. Suite build deps: [AOmxH2IAUDKck2Rm4IOVsg {'project_path': 'ydb/core/tx/datashard/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_snapshot, name=unittest] (uid=rnd-10jrsykls90sokj8): Infrastructure error - contact devtools@ for details. Suite build deps: [dABJ59XXJTGlojDJbS2wvQ {'project_path': 'ydb/core/tx/datashard/ut_snapshot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_stats, name=unittest] (uid=rnd-trplr5cg5a911c5y): Infrastructure error - contact devtools@ for details. Suite build deps: [ffUI2on3gDfBzd7nNgrAgQ {'project_path': 'ydb/core/tx/datashard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_trace, name=unittest] (uid=rnd-co0uz9ay1wi2ka1r): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u3YODMLb4I80UwZ0I9IsaA {'project_path': 'ydb/core/tx/datashard/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_upload_rows, name=unittest] (uid=rnd-5n5fen7in3vqvt4t): Infrastructure error - contact devtools@ for details. 
Suite build deps: [I49jyhlGCA6fO8egDUSLrQ {'project_path': 'ydb/core/tx/datashard/ut_upload_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_vacuum, name=unittest] (uid=rnd-pafm3fk3t78a1hy9): Infrastructure error - contact devtools@ for details. Suite build deps: [XcmsW4-3pH2iCXRjitGe1Q {'project_path': 'ydb/core/tx/datashard/ut_vacuum', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_volatile, name=unittest] (uid=rnd-yxyewxrui9vvojxq): Infrastructure error - contact devtools@ for details. Suite build deps: [a_DSkiF-8cBowbz8bJBefg {'project_path': 'ydb/core/tx/datashard/ut_volatile', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/limiter/grouped_memory/ut, name=unittest] (uid=rnd-q7j9rattwhqfxp63): Infrastructure error - contact devtools@ for details. Suite build deps: [A34YqSyVbB_48nEzqZSFuw {'project_path': 'ydb/core/tx/limiter/grouped_memory/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/long_tx_service/ut, name=unittest] (uid=rnd-32dz8fkxalel9hq2): Infrastructure error - contact devtools@ for details. Suite build deps: [hxicY-1TV7GGW74O090Cxw {'project_path': 'ydb/core/tx/long_tx_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/mediator/ut, name=unittest] (uid=rnd-2t8xu1jf7xsy8z6g): Infrastructure error - contact devtools@ for details. Suite build deps: [WayO-Q8MFfKpkhbXZkhaLw {'project_path': 'ydb/core/tx/mediator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_dst_creator, name=unittest] (uid=rnd-yt9apy35hanh5axn): Infrastructure error - contact devtools@ for details. Suite build deps: [BFzDmqMxJdwORz0Kf8u2pQ {'project_path': 'ydb/core/tx/replication/controller/ut_dst_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_stream_creator, name=unittest] (uid=rnd-58t5fo51aq98mp5q): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-eOsEaruiA8CVY_ZLV3n6w {'project_path': 'ydb/core/tx/replication/controller/ut_stream_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_target_discoverer, name=unittest] (uid=rnd-nfs0xa45p4qd3ms1): Infrastructure error - contact devtools@ for details. Suite build deps: [9opFVjkunilByFTPjN91bw {'project_path': 'ydb/core/tx/replication/controller/ut_target_discoverer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_json_change_record, name=unittest] (uid=rnd-sjpx69xtv4qpu2py): Infrastructure error - contact devtools@ for details. Suite build deps: [L5vwiseFwzyWkh9s2aVFvw {'project_path': 'ydb/core/tx/replication/service/ut_json_change_record', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_table_writer, name=unittest] (uid=rnd-4ujr9g9qfujc8pon): Infrastructure error - contact devtools@ for details. Suite build deps: [Xd0ni5756Hi0ZKdRbHoLnQ {'project_path': 'ydb/core/tx/replication/service/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_worker, name=unittest] (uid=rnd-3pgpafmhj1bp3hnm): Infrastructure error - contact devtools@ for details. Suite build deps: [-UmUgxxpqn9DoG33gIP3BA {'project_path': 'ydb/core/tx/replication/service/ut_worker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/ydb_proxy/ut, name=unittest] (uid=rnd-rv79tqnhdquk4yn8): Infrastructure error - contact devtools@ for details. Suite build deps: [i68m-_FhhioonN6zFgaA1A {'project_path': 'ydb/core/tx/replication/ydb_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_double_indexed, name=unittest] (uid=rnd-lm3survrm9m5axqf): Infrastructure error - contact devtools@ for details. Suite build deps: [pdMl_-TxfWMX4ugQNRfKxg {'project_path': 'ydb/core/tx/scheme_board/ut_double_indexed', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_monitoring, name=unittest] (uid=rnd-hir1h65qxqjzw278): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [trzsnv92w8M376ItLmvYCA {'project_path': 'ydb/core/tx/scheme_board/ut_monitoring', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/scheme_board/ut_populator, name=unittest] (uid=rnd-a4rwteb23978s0kj): Infrastructure error - contact devtools@ for details. Suite build deps: [YtrxxbNF50V9gBvd7WsawQ {'project_path': 'ydb/core/tx/scheme_board/ut_populator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_replica, name=unittest] (uid=rnd-u20n5sjengbz0rm6): Infrastructure error - contact devtools@ for details. Suite build deps: [-BSJvsauTn95u4ypilmwvQ {'project_path': 'ydb/core/tx/scheme_board/ut_replica', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_subscriber, name=unittest] (uid=rnd-v5ov87eq20xuadek): Infrastructure error - contact devtools@ for details. Suite build deps: [KbSm-_Y3D3l3Sv-yEprw-w {'project_path': 'ydb/core/tx/scheme_board/ut_subscriber', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_auditsettings, name=unittest] (uid=rnd-5m39uduh2qslj0hg): Infrastructure error - contact devtools@ for details. Suite build deps: [DOhgrk1wzlVn1Q_mQBU2wg {'project_path': 'ydb/core/tx/schemeshard/ut_auditsettings', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_background_cleaning, name=unittest] (uid=rnd-5atq5xujqoz961y7): Infrastructure error - contact devtools@ for details. Suite build deps: [8Hxst4Fqk_58Ft1OjViVBw {'project_path': 'ydb/core/tx/schemeshard/ut_background_cleaning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection, name=unittest] (uid=rnd-u4z4o5kfltqru5hh): Infrastructure error - contact devtools@ for details. Suite build deps: [Db_DfC4BZN9p-Z58hJ0CtQ {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection_reboots, name=unittest] (uid=rnd-vtvt37qbfnr8nrb9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [LqZF5w8QeNSZOAuc3GrBDA {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base, name=unittest] (uid=rnd-b2y9nfa0tdnrm2jt): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zV5Xv4_weaC_GQW_wRVNmg {'project_path': 'ydb/core/tx/schemeshard/ut_base', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base_reboots, name=unittest] (uid=rnd-elgxqmqrt4w8gk5y): Infrastructure error - contact devtools@ for details. Suite build deps: [dOjoM87TRpekcPUFYLpEAw {'project_path': 'ydb/core/tx/schemeshard/ut_base_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume, name=unittest] (uid=rnd-opki1nwktepy7nts): Infrastructure error - contact devtools@ for details. Suite build deps: [iBJXPT1naAQICmKdocA7JQ {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume_reboots, name=unittest] (uid=rnd-7iijv5skavf25as1): Infrastructure error - contact devtools@ for details. Suite build deps: [jK0so_AbGL-alcaRo-M18g {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream, name=unittest] (uid=rnd-ptd0sfovlh5rehuh): Infrastructure error - contact devtools@ for details. Suite build deps: [LIwCT7HQvOZClHx4vILs4g {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream_reboots, name=unittest] (uid=rnd-2q6dycfyw0xw921w): Infrastructure error - contact devtools@ for details. Suite build deps: [R7gP870dXf8SW6nSoqWCqg {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_column_build, name=unittest] (uid=rnd-boiuacumurrrtlbt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [eqOAMZfwPk9OeKRF0RJs2Q {'project_path': 'ydb/core/tx/schemeshard/ut_column_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_compaction, name=unittest] (uid=rnd-xq97f1qy6vrttye4): Infrastructure error - contact devtools@ for details. Suite build deps: [d-1DWW1bbRdM3xSiLTBSiQ {'project_path': 'ydb/core/tx/schemeshard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup, name=unittest] (uid=rnd-4minl9moyq22hnnu): Infrastructure error - contact devtools@ for details. Suite build deps: [qMazPEkLfIqusWgn2NKjnA {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup_reboots, name=unittest] (uid=rnd-e47huipiqrmiq474): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yMC6RhA5wWHgW8mr1U9VQQ {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export, name=unittest] (uid=rnd-96xdbacu0dzuzdee): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [FKc4h0hkBqDTS-dkC6-DCw {'project_path': 'ydb/core/tx/schemeshard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export_reboots_s3, name=unittest] (uid=rnd-3r8901kofzdemuwk): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rq18eqxhepFDDhodkMHy2w {'project_path': 'ydb/core/tx/schemeshard/ut_export_reboots_s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source, name=unittest] (uid=rnd-gytmt4v2ztk5p4qz): Infrastructure error - contact devtools@ for details. 
Suite build deps: [feuXhtlJIgmtr6fvHGca1A {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source_reboots, name=unittest] (uid=rnd-z1u3illuanoagz7u): Infrastructure error - contact devtools@ for details. Suite build deps: [IdZz8pNYL0Ibg6lJOUaUxg {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_table, name=unittest] (uid=rnd-pq677edmy50b9nbj): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wPM06lKQV_H-kTteeSAQaw {'project_path': 'ydb/core/tx/schemeshard/ut_external_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_table_reboots, name=unittest] (uid=rnd-62steof4l96to5bl): Infrastructure error - contact devtools@ for details. Suite build deps: [RhXXnCTTQAH8EzuW5cvTog {'project_path': 'ydb/core/tx/schemeshard/ut_external_table_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain, name=unittest] (uid=rnd-lskzox3vvovjwwvr): Infrastructure error - contact devtools@ for details. Suite build deps: [tBIPlKZLwdm7gNDTXyVXew {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain_reboots, name=unittest] (uid=rnd-lotuhbxtlyspmwm2): Infrastructure error - contact devtools@ for details. Suite build deps: [or0UXPwY2K9lM7OKERBifw {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_failure_injection, name=unittest] (uid=rnd-c1p34yfmshk30ub7): Infrastructure error - contact devtools@ for details. Suite build deps: [Uh6kg88si_7pnyphHK-Z1A {'project_path': 'ydb/core/tx/schemeshard/ut_failure_injection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_filestore_reboots, name=unittest] (uid=rnd-qj1ablb1bllf82z4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Q0bmEBdJwVpH_TbF0N41iA {'project_path': 'ydb/core/tx/schemeshard/ut_filestore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_incremental_restore, name=unittest] (uid=rnd-fg9vm36iz2l328i6): Infrastructure error - contact devtools@ for details. Suite build deps: [Hl__D-50cRK01vo0Ph1WDw {'project_path': 'ydb/core/tx/schemeshard/ut_incremental_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_incremental_restore_reboots, name=unittest] (uid=rnd-1u84gn3mgnlzl214): Infrastructure error - contact devtools@ for details. Suite build deps: [m8z84lyOz1gMoeoXTCtLNw {'project_path': 'ydb/core/tx/schemeshard/ut_incremental_restore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index, name=unittest] (uid=rnd-dyjzjbjxwks49ved): Infrastructure error - contact devtools@ for details. Suite build deps: [dKt8UzMU89kns0FBHZj3jQ {'project_path': 'ydb/core/tx/schemeshard/ut_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build_reboots, name=unittest] (uid=rnd-m3800fgihc5o8kta): Infrastructure error - contact devtools@ for details. Suite build deps: [awlMXfQySbdEdHs1---8LA {'project_path': 'ydb/core/tx/schemeshard/ut_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_login, name=unittest] (uid=rnd-o2ksgj6naz14y10c): Infrastructure error - contact devtools@ for details. Suite build deps: [awPwmsfHS3f9A0QfqgJZSw {'project_path': 'ydb/core/tx/schemeshard/ut_login', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_move, name=unittest] (uid=rnd-mmel688whzlm5m5n): Infrastructure error - contact devtools@ for details. Suite build deps: [oTbVZtEB-OtUWeiHxqNTow {'project_path': 'ydb/core/tx/schemeshard/ut_move', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap, name=unittest] (uid=rnd-2c7b8kv2w60p5ak7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4V_bmLIQncYNqnisHRQPVw {'project_path': 'ydb/core/tx/schemeshard/ut_olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap_reboots, name=unittest] (uid=rnd-xisb46zcusulbits): Infrastructure error - contact devtools@ for details. Suite build deps: [_jn8a7TgpLmTr92RiAJXng {'project_path': 'ydb/core/tx/schemeshard/ut_olap_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_reboots, name=unittest] (uid=rnd-adz8sykdqm5lq2hb): Infrastructure error - contact devtools@ for details. Suite build deps: [BjDNMg5YNxpdnIhWYyzxfg {'project_path': 'ydb/core/tx/schemeshard/ut_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_replication_reboots, name=unittest] (uid=rnd-i1mym1d15qizu8d4): Infrastructure error - contact devtools@ for details. Suite build deps: [MezbuoIhEz9abtj7eg4CpQ {'project_path': 'ydb/core/tx/schemeshard/ut_replication_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_resource_pool, name=unittest] (uid=rnd-x6lpojy7r0lfv829): Infrastructure error - contact devtools@ for details. Suite build deps: [e0MvAO-nl_WkOQ9CNmYSvA {'project_path': 'ydb/core/tx/schemeshard/ut_resource_pool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_resource_pool_reboots, name=unittest] (uid=rnd-6x3eo7r0ul424zt5): Infrastructure error - contact devtools@ for details. Suite build deps: [GQycUOVsSwTibD037gntnA {'project_path': 'ydb/core/tx/schemeshard/ut_resource_pool_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_restore, name=unittest] (uid=rnd-a9727l23j3irevca): Infrastructure error - contact devtools@ for details. Suite build deps: [8ZdgwjwNXkCIl5ScKFsCug {'project_path': 'ydb/core/tx/schemeshard/ut_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr, name=unittest] (uid=rnd-lnsh1ij36dmmt3rh): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Rbq819mKbpLkrVwQQ2sx1A {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr_reboots, name=unittest] (uid=rnd-456x3ti0xhi2p758): Infrastructure error - contact devtools@ for details. Suite build deps: [IyzkEDbmV0M5lgwSM-Z9nA {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ru_calculator, name=unittest] (uid=rnd-kfgetky7jtdv59vf): Infrastructure error - contact devtools@ for details. Suite build deps: [DXV2sXyIAdUoaMl1i5lRUg {'project_path': 'ydb/core/tx/schemeshard/ut_ru_calculator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_secret, name=unittest] (uid=rnd-000qi9nkmn0i97by): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xsVY6bhazrf5FTYpqO5m5g {'project_path': 'ydb/core/tx/schemeshard/ut_secret', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_secret_reboots, name=unittest] (uid=rnd-mr9vp2bvrnk00ko0): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xS0SGi9BQ4aLqwFal0BNVg {'project_path': 'ydb/core/tx/schemeshard/ut_secret_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless, name=unittest] (uid=rnd-89smyzjox1t09pa0): Infrastructure error - contact devtools@ for details. Suite build deps: [UddFLaadY9UdrrXJHI80ZQ {'project_path': 'ydb/core/tx/schemeshard/ut_serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_split_merge, name=unittest] (uid=rnd-owwm6u3l3psqogg7): Infrastructure error - contact devtools@ for details. Suite build deps: [PiZMFH_CArv-IXXYcG2PfQ {'project_path': 'ydb/core/tx/schemeshard/ut_split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_stats, name=unittest] (uid=rnd-t8c7bbcpidlpe02u): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-OeDz_BUdXnRyZ4DLkfd3Q {'project_path': 'ydb/core/tx/schemeshard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_streaming_query, name=unittest] (uid=rnd-nr9kyotkpwmqhdwh): Infrastructure error - contact devtools@ for details. Suite build deps: [MHAw6NLyeOH0ZLZosBNAgQ {'project_path': 'ydb/core/tx/schemeshard/ut_streaming_query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_streaming_query_reboots, name=unittest] (uid=rnd-nzjqtzwt9yp7oipa): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yUpamWog8DTdJwAwrCv-gA {'project_path': 'ydb/core/tx/schemeshard/ut_streaming_query_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain, name=unittest] (uid=rnd-3k5grdkb4qs28lhb): Infrastructure error - contact devtools@ for details. Suite build deps: [qrLP0RMcRR3o08aB4_sPfA {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain_reboots, name=unittest] (uid=rnd-o7djf8g0acgdz4oz): Infrastructure error - contact devtools@ for details. Suite build deps: [r0TKetvm1hFusRAoVYTCVw {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_system_names, name=unittest] (uid=rnd-xauxfbhwf6nunna8): Infrastructure error - contact devtools@ for details. Suite build deps: [d3of7F2eSfbMZpPkaskASg {'project_path': 'ydb/core/tx/schemeshard/ut_system_names', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_sysview_reboots, name=unittest] (uid=rnd-azokns09oc7lo94m): Infrastructure error - contact devtools@ for details. Suite build deps: [WXat6498WJ4KEp6SlhiLdw {'project_path': 'ydb/core/tx/schemeshard/ut_sysview_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_topic_set_boundaries, name=unittest] (uid=rnd-e2p9swi86gmtbz3v): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ESkls2LuEh_2Fl2dQ1BBDQ {'project_path': 'ydb/core/tx/schemeshard/ut_topic_set_boundaries', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_topic_splitmerge, name=unittest] (uid=rnd-ziw9zz3gj243xipa): Infrastructure error - contact devtools@ for details. Suite build deps: [XRZxG_rxrCUQ-sNV4OPr7A {'project_path': 'ydb/core/tx/schemeshard/ut_topic_splitmerge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ttl, name=unittest] (uid=rnd-mrfdseceja80hcbc): Infrastructure error - contact devtools@ for details. Suite build deps: [XDQbgiAdmCk47KXFk9Qzbw {'project_path': 'ydb/core/tx/schemeshard/ut_ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes, name=unittest] (uid=rnd-di91w55ukqnhzdv0): Infrastructure error - contact devtools@ for details. Suite build deps: [qNQdgqSP8Ps8QFskT-AJAA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes_reboots, name=unittest] (uid=rnd-yhht36oidatk7u3u): Infrastructure error - contact devtools@ for details. Suite build deps: [WUYmfVgN8t_LZGxMmPi0YA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_vector_index_build_reboots, name=unittest] (uid=rnd-v72ptfflrvb5zuhd): Infrastructure error - contact devtools@ for details. Suite build deps: [9Rnm685erbAp1QnQhcKeaA {'project_path': 'ydb/core/tx/schemeshard/ut_vector_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceproxy/ut, name=unittest] (uid=rnd-z77bvl3r736wz8vb): Infrastructure error - contact devtools@ for details. Suite build deps: [YVhMCiXz6q1y8KTnKZljKA {'project_path': 'ydb/core/tx/sequenceproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceshard/ut, name=unittest] (uid=rnd-rgvqtlpyg4itb9pa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [0vsdjE1bnxlIIezSe6s7aQ {'project_path': 'ydb/core/tx/sequenceshard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sharding/ut, name=unittest] (uid=rnd-m5qbss0w9d9z8p06): Infrastructure error - contact devtools@ for details. Suite build deps: [_p3HT93GNxzte96VNlKciw {'project_path': 'ydb/core/tx/sharding/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tiering/ut, name=unittest] (uid=rnd-1x5ymswf1e7hxndk): Infrastructure error - contact devtools@ for details. Suite build deps: [lk2t8BGNWMVSSq9D1f_RcQ {'project_path': 'ydb/core/tx/tiering/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/time_cast/ut, name=unittest] (uid=rnd-2qzi2xviqc5w86hc): Infrastructure error - contact devtools@ for details. Suite build deps: [8Dg4aIIyNkowHS4DY2pIRw {'project_path': 'ydb/core/tx/time_cast/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator/ut, name=unittest] (uid=rnd-5s42b8ngv1f877rv): Infrastructure error - contact devtools@ for details. Suite build deps: [Dbk5GoeOJxYTT-4BiLLN9g {'project_path': 'ydb/core/tx/tx_allocator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_base_tenant, name=unittest] (uid=rnd-ydobryfn2eleuzth): Infrastructure error - contact devtools@ for details. Suite build deps: [-OphCGfhfabU0qEPUkB4Zw {'project_path': 'ydb/core/tx/tx_proxy/ut_base_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_encrypted_storage, name=unittest] (uid=rnd-kwvxzhtwnjfrdupz): Infrastructure error - contact devtools@ for details. Suite build deps: [LMe5ECRLZxpZI1_--4MDrA {'project_path': 'ydb/core/tx/tx_proxy/ut_encrypted_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_ext_tenant, name=unittest] (uid=rnd-zpz2jpmiy80m93ky): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BsBthz5OF50xjOEe_5V_0A {'project_path': 'ydb/core/tx/tx_proxy/ut_ext_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_storage_tenant, name=unittest] (uid=rnd-3ajg3hmhumd38tha): Infrastructure error - contact devtools@ for details. Suite build deps: [_Pb3e2dXxOYTmZrx5kc1JQ {'project_path': 'ydb/core/tx/tx_proxy/ut_storage_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/ut, name=unittest] (uid=rnd-m2gi8shn3pyhx54t): Infrastructure error - contact devtools@ for details. Suite build deps: [9G9jB5APk36vdZwwRdEjSQ {'project_path': 'ydb/core/viewer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/wrappers/ut, name=unittest] (uid=rnd-9sglj3x5tv6bgajg): Infrastructure error - contact devtools@ for details. Suite build deps: [Im4axzWBFIQGvfl5drWdzQ {'project_path': 'ydb/core/wrappers/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ydb_convert/ut, name=unittest] (uid=rnd-bzz6390dd7szdgtb): Infrastructure error - contact devtools@ for details. Suite build deps: [cXXgdiC5qGpXRCkEeP4LTQ {'project_path': 'ydb/core/ydb_convert/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/cloud_events/cloud_events_ut, name=unittest] (uid=rnd-q7j7z6kexacvvu2l): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yLRNf0brYe2h8hNKMOA58Q {'project_path': 'ydb/core/ymq/actor/cloud_events/cloud_events_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/ymq/actor/yc_search_ut, name=unittest] (uid=rnd-4xu69oyezmab1xjc): Infrastructure error - contact devtools@ for details. Suite build deps: [GusBf2M0Ns7V9r3pFaH_WA {'project_path': 'ydb/core/ymq/actor/yc_search_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/base/ut, name=unittest] (uid=rnd-spy1mv7gpgw2u9cf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [9DWg3VLsC_5C6HFnLeHxCQ {'project_path': 'ydb/core/ymq/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/http/ut, name=unittest] (uid=rnd-cim1q057r4azfcmz): Infrastructure error - contact devtools@ for details. Suite build deps: [5yaK2b0ecP0gn3tHA49mDg {'project_path': 'ydb/core/ymq/http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/ut, name=unittest] (uid=rnd-mr5oewpjx3ohiz4g): Infrastructure error - contact devtools@ for details. Suite build deps: [WccYW12-XcwTZAwLQIl3iQ {'project_path': 'ydb/core/ymq/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/ncloud/impl/ut, name=unittest] (uid=rnd-43neey5a5twyf8zm): Infrastructure error - contact devtools@ for details. Suite build deps: [AaX8AOvWbZzCFyMU7VJgfw {'project_path': 'ydb/library/ncloud/impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/query_actor/ut, name=unittest] (uid=rnd-14zscc79fhegdcwy): Infrastructure error - contact devtools@ for details. Suite build deps: [kHlVdzYa3hK-MebVO-M0GA {'project_path': 'ydb/library/query_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/table_creator/ut, name=unittest] (uid=rnd-vb58gqa4n9clohjx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xpYWceOA6ApJasg7zsY6Mg {'project_path': 'ydb/library/table_creator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/providers/generic/actors/ut, name=unittest] (uid=rnd-axfmhaic6ym652po): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xGzrVYHlN_wbNkdYo5mVxw {'project_path': 'ydb/library/yql/providers/generic/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/providers/pq/provider/ut, name=unittest] (uid=rnd-n1myk8pyqfclllqe): Infrastructure error - contact devtools@ for details. 
Suite build deps: [6pua9k7bHTm3uinIRY3sQA {'project_path': 'ydb/library/yql/providers/pq/provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/providers/solomon/actors/ut, name=unittest] (uid=rnd-468tb62ea3wjsx2j): Infrastructure error - contact devtools@ for details. Suite build deps: [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [iqZovhX8-wZxMP4ZK0p5pQ {'project_path': 'ydb/library/yql/providers/solomon/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/public/sdk/cpp/src/client/federated_topic/ut, name=unittest] (uid=rnd-20403tkbpefwmq8e): Infrastructure error - contact devtools@ for details. Suite build deps: [K30y2FGFi2wtayu9J-XxLA {'project_path': 'ydb/public/sdk/cpp/src/client/federated_topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut, name=unittest] (uid=rnd-b3keiixrrajsaet8): Infrastructure error - contact devtools@ for details. Suite build deps: [LewVznt6-7vbcp5hdtmIgw {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut, name=unittest] (uid=rnd-61lf2kamn7hwjyph): Infrastructure error - contact devtools@ for details. Suite build deps: [9pD4AHMZhTJVR0eSOFHN8Q {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut, name=unittest] (uid=rnd-59t2893az7h5khj2): Infrastructure error - contact devtools@ for details. Suite build deps: [2qxL0rdeHctdbRIj4up9Bw {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut, name=unittest] (uid=rnd-byt1f8ppgm1qd0np): Infrastructure error - contact devtools@ for details. 
Suite build deps: [idOZYXM632ouyQhfP7e_eQ {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/cms/ut, name=unittest] (uid=rnd-80oct4xp85rdxbza): Infrastructure error - contact devtools@ for details. Suite build deps: [eOe_sx9dNL7mGOHq2ij_ZA {'project_path': 'ydb/services/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/config/ut, name=unittest] (uid=rnd-u2k9yb438rq4nbs5): Infrastructure error - contact devtools@ for details. Suite build deps: [KAmsgVjmQpesezfiZUibyw {'project_path': 'ydb/services/config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/datastreams/ut, name=unittest] (uid=rnd-dg65xc7kud0sev9q): Infrastructure error - contact devtools@ for details. Suite build deps: [Vl1zi_-X63C5LZ0E8w3Yiw {'project_path': 'ydb/services/datastreams/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/dynamic_config/ut, name=unittest] (uid=rnd-q9sy9opwm5omq85i): Infrastructure error - contact devtools@ for details. Suite build deps: [Z9UqiYDmoYcqLBlEAMhojw {'project_path': 'ydb/services/dynamic_config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ext_index/ut, name=unittest] (uid=rnd-6jxhb7s84zmz22xm): Infrastructure error - contact devtools@ for details. Suite build deps: [DBNs0CbxNFHp-aJdsu6giQ {'project_path': 'ydb/services/ext_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/fq/ut_integration, name=unittest] (uid=rnd-s1dsgnp8i13urh7c): Infrastructure error - contact devtools@ for details. Suite build deps: [inKKMCq6JPg0_mxQrw-6Dg {'project_path': 'ydb/services/fq/ut_integration', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/keyvalue/ut, name=unittest] (uid=rnd-ljkmtmojxaeosneg): Infrastructure error - contact devtools@ for details. 
Suite build deps: [PrsYyrANCBu4kEm736Bz8w {'project_path': 'ydb/services/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/metadata/initializer/ut, name=unittest] (uid=rnd-h2mma5zzie5842l1): Infrastructure error - contact devtools@ for details. Suite build deps: [hoIa9HSTKJtHFpSkf0Wtuw {'project_path': 'ydb/services/metadata/initializer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/metadata/secret/ut, name=unittest] (uid=rnd-vc6l7l0lssokq8u9): Infrastructure error - contact devtools@ for details. Suite build deps: [1lYo0nQlsoTIowLCznnQtQ {'project_path': 'ydb/services/metadata/secret/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/persqueue_cluster_discovery/ut, name=unittest] (uid=rnd-flbwfhwfqnc64v1a): Infrastructure error - contact devtools@ for details. Suite build deps: [hldHtvsZNY3uKgAnZ6Tl-Q {'project_path': 'ydb/services/persqueue_cluster_discovery/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/persqueue_v1/ut, name=unittest] (uid=rnd-ij2ppc08f1z8pjln): Infrastructure error - contact devtools@ for details. Suite build deps: [ir58ly821plRjVtkpG5cPQ {'project_path': 'ydb/services/persqueue_v1/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/persqueue_v1/ut/describes_ut, name=unittest] (uid=rnd-c0dhbiyntc6j2syi): Infrastructure error - contact devtools@ for details. Suite build deps: [8Zoz_1JY7vzyYTkMJtQ1yg {'project_path': 'ydb/services/persqueue_v1/ut/describes_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/persqueue_v1/ut/new_schemecache_ut, name=unittest] (uid=rnd-f1xga9lsjajqfv6s): Infrastructure error - contact devtools@ for details. Suite build deps: [3Yoer_B5f11XVTRet7lrSQ {'project_path': 'ydb/services/persqueue_v1/ut/new_schemecache_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/rate_limiter/ut, name=unittest] (uid=rnd-sdmd4qd0o4m8ltad): Infrastructure error - contact devtools@ for details. 
Suite build deps: [lJrPizn6_pekTZ8I_78Nug {'project_path': 'ydb/services/rate_limiter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ydb/backup_ut, name=unittest] (uid=rnd-vgx8cmt7lbrw60nn): Infrastructure error - contact devtools@ for details. Suite build deps: [5gMSu3U7dJM_rjPKGlTZNA {'project_path': 'ydb/services/ydb/backup_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ydb/ut, name=unittest] (uid=rnd-1otwb5kdygt0w0zd): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [y16TwfRXjlM9KDhC5oshxg {'project_path': 'ydb/services/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/control_plane_storage, name=unittest] (uid=rnd-2h7g48hdnajegxe6): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Yk-dm7FJEqws5N1DrNNaEA {'project_path': 'ydb/tests/fq/control_plane_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/pq_async_io/ut, name=unittest] (uid=rnd-r1re6ga1dc9irb2a): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tP5ByMKymBSAcmPs4xRz-w {'project_path': 'ydb/tests/fq/pq_async_io/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/backup, name=unittest] (uid=rnd-lua0dfvqurhll507): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4aAhZXFJekFavucKAv-9LQ {'project_path': 'ydb/tests/functional/backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/backup/s3_path_style, name=unittest] (uid=rnd-xsgcmmjgbwey8ua2): Infrastructure error - contact devtools@ for details. Suite build deps: [GUWutYCsU98fXdqV46JJZw {'project_path': 'ydb/tests/functional/backup/s3_path_style', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/kqp_indexes, name=unittest] (uid=rnd-l6293u1k111ne7ot): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [X5OgJxCiw9JtN6ASEv2UpA {'project_path': 'ydb/tests/functional/kqp/kqp_indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/kqp_query_session, name=unittest] (uid=rnd-gpggx4r1awgz13p5): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rJYMlPHqR_Rd-UpIAeRCFg {'project_path': 'ydb/tests/functional/kqp/kqp_query_session', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/kqp_query_svc, name=unittest] (uid=rnd-sxjj7v6zjz4cv3lf): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i3D7PdHiySeSkneOZQHb9Q {'project_path': 'ydb/tests/functional/kqp/kqp_query_svc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/replication, name=unittest] (uid=rnd-17zpopy4c1secji3): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nUIAl444UQudDJXfaXIPiQ {'project_path': 'ydb/tests/functional/replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sdk/cpp/sdk_credprovider, name=unittest] (uid=rnd-uqpc4fkthns67swv): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nasmOdUCciMvCqo47xNijQ {'project_path': 'ydb/tests/functional/sdk/cpp/sdk_credprovider', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/high_load, name=unittest] (uid=rnd-tx4dh8d8xkkx51gy): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pTicvaH3rEiPVN_3whOTKQ {'project_path': 'ydb/tests/olap/high_load', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/external_sources/object_storage/inference/ut, name=gtest] (uid=rnd-vffcxbmfpp86q9x3): Infrastructure error - contact devtools@ for details. Suite build deps: [40zmIODTK7Ycp5o72HMZQQ {'project_path': 'ydb/core/external_sources/object_storage/inference/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/gateway/ut, name=gtest] (uid=rnd-jf49asylwxtni3iy): Infrastructure error - contact devtools@ for details. Suite build deps: [-k1g4wzzukjK2T1Bn04cPw {'project_path': 'ydb/core/kqp/gateway/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/basic_example, name=gtest] (uid=rnd-1f2ub70iy9i70ouz): Infrastructure error - contact devtools@ for details. Suite build deps: [EBcRIU702A0j0W9DXXb2Wg {'project_path': 'ydb/public/sdk/cpp/tests/integration/basic_example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/bulk_upsert, name=gtest] (uid=rnd-xpxojqnqdru7fehk): Infrastructure error - contact devtools@ for details. Suite build deps: [8UoiqpDSHEUB1kMfAGCIjw {'project_path': 'ydb/public/sdk/cpp/tests/integration/bulk_upsert', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/server_restart, name=gtest] (uid=rnd-jyrxd8o2rxe5pgcx): Infrastructure error - contact devtools@ for details. 
Suite build deps: [45mRXg-JkXZFDJoXFeQXTw {'project_path': 'ydb/public/sdk/cpp/tests/integration/server_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/sessions, name=gtest] (uid=rnd-5sk9i9w1hq6yvmlk): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcrU_es_tU5qXGu0wjZtVQ {'project_path': 'ydb/public/sdk/cpp/tests/integration/sessions', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/sessions_pool, name=gtest] (uid=rnd-a3novysehk2o0c8j): Infrastructure error - contact devtools@ for details. Suite build deps: [-LFYnZpw5iSKrsZ0AKkvAA {'project_path': 'ydb/public/sdk/cpp/tests/integration/sessions_pool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/topic, name=gtest] (uid=rnd-5dhsrqsjjpav4g5v): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yOq7phgMzDQr_1vn5MuZ6A {'project_path': 'ydb/public/sdk/cpp/tests/integration/topic', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/topic/with_direct_read, name=gtest] (uid=rnd-mbqo5r50dabz78ac): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ilBj_nObxXU6emREGvsDug {'project_path': 'ydb/public/sdk/cpp/tests/integration/topic/with_direct_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/tests, name=py3test] (uid=rnd-4uk24exhuikajp60): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [olgfAqGEjtLFkEjPOFeyIQ {'project_path': 'ydb/core/viewer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/add_column, name=py3test] (uid=rnd-0eqrc3dw2m81tppc): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ayx8XGla8SIcK_sY9t2yIw {'project_path': 'ydb/tests/datashard/add_column', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/async_replication, name=py3test] (uid=rnd-6ihinte7xxkdlxyj): Infrastructure error - contact devtools@ for details. Suite build deps: [4AuU0L8ovKb_CS09YCFeCA {'project_path': 'ydb/tests/datashard/async_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/copy_table, name=py3test] (uid=rnd-21o224ji4z9orodu): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4IdQjBHMBOMpqEbA3nFG6g {'project_path': 'ydb/tests/datashard/copy_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/dml, name=py3test] (uid=rnd-us0s6kwxmwanpwo5): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PgHCMk5WDQVgay0DqJYKlg {'project_path': 'ydb/tests/datashard/dml', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/dump_restore, name=py3test] (uid=rnd-j24je3toy91hf0uk): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [g_ICgH-RDb3psFGDgNHwow {'project_path': 'ydb/tests/datashard/dump_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/parametrized_queries, name=py3test] (uid=rnd-rrzue70629t699g5): Infrastructure error - contact devtools@ for details. Suite build deps: [HT5Ims0WDa7mRdR43K1t5A {'project_path': 'ydb/tests/datashard/parametrized_queries', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/partitioning, name=py3test] (uid=rnd-1xt6uouqnppi27g7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yPJkm5SFCpqFYg66vKb8iA {'project_path': 'ydb/tests/datashard/partitioning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/datashard/s3, name=py3test] (uid=rnd-ek9mqiwx4vmuj9je): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [guIwuJOad3W9i-WWFPiA3g {'project_path': 'ydb/tests/datashard/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/secondary_index, name=py3test] (uid=rnd-h7r3rq7liopdtf6i): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [UN3Z4X32s-sgtvUlwUUgpg {'project_path': 'ydb/tests/datashard/secondary_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/select, name=py3test] (uid=rnd-8exn9gsvip6zfkzz): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uaJybSHWpfKsTpsIDli7rQ {'project_path': 'ydb/tests/datashard/select', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/datashard/split_merge, name=py3test] (uid=rnd-7z4xzrg9lc3u9yig): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [QF5DffHKQ00sJEoggVMtTw {'project_path': 'ydb/tests/datashard/split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/ttl, name=py3test] (uid=rnd-jvzwzf6aiur1pnco): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Y6yKNwK9PavSqjxLMGIXEg {'project_path': 'ydb/tests/datashard/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/datashard/vector_index/medium, name=py3test] (uid=rnd-c83m3rflllz0xsfu): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u1JyxZVpnF1534lS5K7VJQ {'project_path': 'ydb/tests/datashard/vector_index/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/example, name=py3test] (uid=rnd-19mushrfxvco2x87): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lwWq6TJNS3N78mAwjI9Mmg {'project_path': 'ydb/tests/example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/common, name=py3test] (uid=rnd-2vml41rq8es8o0be): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WkgDriT61HCMF_TvwH89yQ {'project_path': 'ydb/tests/fq/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/http_api, name=py3test] (uid=rnd-r2qcz2fqmlsarj00): Infrastructure error - contact devtools@ for details. Suite build deps: [AmP9R1q2ZjYyHQRtlMqqyQ {'project_path': 'ydb/tests/fq/http_api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/mem_alloc, name=py3test] (uid=rnd-ex84trkhjlupzrq1): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vd9PbooFChU8NRzM3oXRWQ {'project_path': 'ydb/tests/fq/mem_alloc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/multi_plane, name=py3test] (uid=rnd-ncnx7ecuw62klkow): Infrastructure error - contact devtools@ for details. Suite build deps: [80EKhPAiZLgqfmJLolavyQ {'project_path': 'ydb/tests/fq/multi_plane', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/plans, name=py3test] (uid=rnd-pbpdvuwnqmute7vd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [EAwgvmj8cC-V5e0pFFAVKQ {'project_path': 'ydb/tests/fq/plans', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/restarts, name=py3test] (uid=rnd-mgiohxbekv14j23m): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [scPYvT-oTX2h4T42fGg7Xg {'project_path': 'ydb/tests/fq/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/s3, name=py3test] (uid=rnd-nurkzcqv0ag4l9my): Infrastructure error - contact devtools@ for details. Suite build deps: [8eon1HDZ-btTtkjtQqnL5g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/solomon, name=py3test] (uid=rnd-e22xyojctn8ozzg2): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [47L8WBzgkkJikv5HXYyLRw {'project_path': 'ydb/tests/fq/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/streaming, name=py3test] (uid=rnd-sbqlxc74u8erfnyz): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cgTaSZcQ-pmpIpOqbreTjw {'project_path': 'ydb/tests/fq/streaming', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/streaming_optimize, name=py3test] (uid=rnd-2cuu7qes6axlri5b): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7zAS-SOnP7yW-m0yXTooHw {'project_path': 'ydb/tests/tools/fqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WCV7s7e1XRGC3Sf5Fvy9yA {'project_path': 'yql/essentials/tools/sql2yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [XvHME7g29u0zeXwTHvjmPw {'project_path': 'ydb/tests/fq/streaming_optimize', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yds, name=py3test] (uid=rnd-cgolqo1wywhny13j): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lH8UONP_kSZg2f6QHRU5uw {'project_path': 'ydb/tests/fq/yds', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part0, name=py3test] (uid=rnd-0h1lfbuivu5j3vud): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [KvB0wWryVi4R9v4YIrLLtQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part1, name=py3test] (uid=rnd-wehd20y6u2mlhknh): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [M_ohdGjD4ax3aQoU7WFb6g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part10, name=py3test] (uid=rnd-h9z1c1iuom14z8l9): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [NRTPWWQ9uXgqpJ8orTx9jg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part11, name=py3test] (uid=rnd-surlkb1mkl8epexa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [kQy0Q54LSu2UDILJ3T_myg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part12, name=py3test] (uid=rnd-u5le88qqxcfdpy4b): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [2G00NM1F1n-2POpKbxeilg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part13, name=py3test] (uid=rnd-fmsgp393l11fooxm): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [gWuyy52rvCq-syaEgpnCsA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part14, name=py3test] (uid=rnd-yu1zrvdyzs4ttbia): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [KvH9qEW4v6X0rvmb_n17xQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part15, name=py3test] (uid=rnd-o0tim20z0ioo6w5q): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [HNYyA0TSLNYOw3xmhmICYg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part16, name=py3test] (uid=rnd-nyjdpohutcg4bh96): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [07dVKO5sh7aY9Y3_A858FQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part17, name=py3test] (uid=rnd-uvpszqj01xjilu4y): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GTwN6d-ENXnCgb71eYIN5Q {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part18, name=py3test] (uid=rnd-ury7gwrhvuchnp95): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GnNjxFmOGT5jbSg1hfHr3g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part19, name=py3test] (uid=rnd-y39na4to2it8egoq): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [hXGXWyZHexihs7dasKqtbw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part2, name=py3test] (uid=rnd-5vh467mbda3xejpd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [-pDeMZzNM1c2R6S07qExUQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part3, name=py3test] (uid=rnd-1mogrwgakxeuxzmy): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [Jgk6dV6zWR3bw9CBpTnxxA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part4, name=py3test] (uid=rnd-cykp15n1toti9k8z): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [8V2itxcbZPkHSca-g5WYJQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part5, name=py3test] (uid=rnd-5aaegb6cvwb1ttb1): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z0wDSPbuWcqXK-E8m7BBog {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part6, name=py3test] (uid=rnd-djuhtzsf8cyz6qtm): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vZGQjbcwHPg3bwqEkWxFNg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part7, name=py3test] (uid=rnd-xtniwl3p4dvov6w0): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [bcbKv8EE_9N7UUM1zsRYUQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part8, name=py3test] (uid=rnd-jspkr9iai3v4wh69): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [U8YsV8x93shMCN6IneZYOg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part9, name=py3test] (uid=rnd-o0hzb45knmnxkx2w): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [BPtiDP_04PQYlXT1ngiG2g {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_import, name=py3test] (uid=rnd-bx7nxeqf2si0317k): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ycRZZrSySRso76goAOtbPQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_import', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/api, name=py3test] (uid=rnd-mjb2v7o6mjftcho1): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [im_0wP_gQyog5zvGMydWGQ {'project_path': 'ydb/tests/functional/api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/audit, name=py3test] (uid=rnd-vk2eqgmi5ihs796e): Infrastructure error - contact devtools@ for details. 
Suite build deps: [HL5sUBiPiTSqA9r3Q7NRuA {'project_path': 'ydb/tests/functional/audit', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aLkbkLGwdI2ch9zTXTFP2A {'project_path': 'ydb/apps/dstool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/autoconfig, name=py3test] (uid=rnd-5wgux9xuowjnul9o): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Q51LidrcFfjxEoE6kAEOgA {'project_path': 'ydb/tests/functional/autoconfig', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/blobstorage, name=py3test] (uid=rnd-x73jtoaqb081t7sa): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ0GXCjED_THn5B39AoLlQ {'project_path': 'ydb/tests/functional/blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/canonical, name=py3test] (uid=rnd-pz2fj1rftyclb5rq): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zTlnVzmRDBK0LsKh6K902w {'project_path': 'ydb/tests/functional/canonical', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/clickbench, name=py3test] (uid=rnd-7n9ux7gfaz9mr3e8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [DwsICOZem2jlEmXycxVtCA {'project_path': 'ydb/tests/functional/clickbench', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/cms, name=py3test] (uid=rnd-3vrqtpss2cybol82): Infrastructure error - contact devtools@ for details. Suite build deps: [3X_4aWop_9xlhZeq__pdjA {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/encryption, name=py3test] (uid=rnd-ba37xutl4rjscjbb): Infrastructure error - contact devtools@ for details. Suite build deps: [KHW-X2uVrzIunn8TBs-A5w {'project_path': 'ydb/tests/functional/encryption', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/hive, name=py3test] (uid=rnd-q8ujl5byy5k4kerj): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nD1IDMU1vzAHhreuCwBJAg {'project_path': 'ydb/tests/functional/hive', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/plan2svg, name=py3test] (uid=rnd-fh583c4vrmfrbyku): Infrastructure error - contact devtools@ for details. Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [papuI5BL0Gg5HM63eorxBg {'project_path': 'ydb/tests/functional/kqp/plan2svg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/limits, name=py3test] (uid=rnd-xynqa1q6tr84xs75): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQOb6VdWlSHMsI5RWglT4A {'project_path': 'ydb/tests/functional/limits', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/minidumps, name=py3test] (uid=rnd-twzbkqr8is7q3xg2): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sxRcRBfTYgxW-lBThSOYCQ {'project_path': 'ydb/tests/functional/minidumps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/postgresql, name=py3test] (uid=rnd-rf7hx9xo44g6gpts): Infrastructure error - contact devtools@ for details. Suite build deps: [CDj_hpCSXgjkNtlV_ewJIQ {'project_path': 'ydb/tests/functional/postgresql/psql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PLQu-tQpmIHqQm5uWrPXEA {'project_path': 'ydb/tests/functional/postgresql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/query_cache, name=py3test] (uid=rnd-4usyodomnj88u95n): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lK62UiUy8CZG-KLOLoM2GA {'project_path': 'ydb/tests/functional/query_cache', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/rename, name=py3test] (uid=rnd-fkn2nc2daumnjtno): Infrastructure error - contact devtools@ for details. Suite build deps: [Grc41KSRZBDh2daQFkGKSw {'project_path': 'ydb/tests/functional/rename', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/restarts, name=py3test] (uid=rnd-3znb64zgqfq5zip8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_HT3ybXkNdoh5teF9KbA9g {'project_path': 'ydb/tests/functional/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/scheme_shard, name=py3test] (uid=rnd-9md5gzy5b9fu8o8q): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ydPR3mQ44DIJsCCabBpvjw {'project_path': 'ydb/tests/functional/scheme_shard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/scheme_tests, name=py3test] (uid=rnd-x5ehwp428kz3iot0): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v3M7Q5p6SkkasbM9VA5Ujg {'project_path': 'ydb/tests/functional/scheme_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/script_execution, name=py3test] (uid=rnd-0kmzcyrfndp8dq6n): Infrastructure error - contact devtools@ for details. Suite build deps: [Mo_KEEOAnKNdXUva_VuVRA {'project_path': 'ydb/tests/functional/script_execution', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/security, name=py3test] (uid=rnd-2waanrnvhbfewvvm): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZtxfI4fmr-8KroBw7np3RA {'project_path': 'ydb/tests/functional/security', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serializable, name=py3test] (uid=rnd-essz4izil0dy3tir): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MeaX7qsLtlqiOQc0UFJtWw {'project_path': 'ydb/tests/functional/serializable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serverless, name=py3test] (uid=rnd-ldm0dpab6yfv707i): Infrastructure error - contact devtools@ for details. Suite build deps: [9QPZYfBspWRnfPsB-vXB0g {'project_path': 'ydb/tests/functional/serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/cloud, name=py3test] (uid=rnd-vgkaes6xq84tlx7w): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [npRDw4mQghXmQ2PiWTrWcQ {'project_path': 'ydb/tests/functional/sqs/cloud', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/common, name=py3test] (uid=rnd-pzkaamke376eyus8): Infrastructure error - contact devtools@ for details. Suite build deps: [A-0HFt28yTTCtOzjPQ3LpA {'project_path': 'ydb/tests/functional/sqs/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/large, name=py3test] (uid=rnd-y8bc6bw0wuy4dnsm): Infrastructure error - contact devtools@ for details. Suite build deps: [Jb70WXWVJfdxyq-arJJ5dw {'project_path': 'ydb/tests/functional/sqs/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/fifo, name=py3test] (uid=rnd-mfumrvqkxir2eiyl): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [TajYwI71J6fHSMD5gX1rtQ {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/fifo', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/std, name=py3test] (uid=rnd-tft2jetajyqwgssp): Infrastructure error - contact devtools@ for details. Suite build deps: [E6uEnFwRqUA6dIJVnbAUTw {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/std', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/messaging, name=py3test] (uid=rnd-ao9cu8e065y405b9): Infrastructure error - contact devtools@ for details. Suite build deps: [GjA2gBupTIlWkZCJg2DFXg {'project_path': 'ydb/tests/functional/sqs/messaging', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/multinode, name=py3test] (uid=rnd-vxsm9mr6rbboqqno): Infrastructure error - contact devtools@ for details. Suite build deps: [6AzjpakulRNIvAeyOI0x7Q {'project_path': 'ydb/tests/functional/sqs/multinode', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/with_quotas, name=py3test] (uid=rnd-hqgc8k5cwfm7z9la): Infrastructure error - contact devtools@ for details. Suite build deps: [BSeuSZEo6i_o_PHcGBxZBg {'project_path': 'ydb/tests/functional/sqs/with_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/statistics, name=py3test] (uid=rnd-1albt1ha7oaszt2z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Q0fyPzZRKd8AOf3PxF3cMA {'project_path': 'ydb/tests/functional/statistics', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/suite_tests, name=py3test] (uid=rnd-djcadhgz3vght7fy): Infrastructure error - contact devtools@ for details. Suite build deps: [IhzAsKweF8WS35vxF_jCsQ {'project_path': 'ydb/tests/functional/suite_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tenants, name=py3test] (uid=rnd-3ertbbqork1b3r6r): Infrastructure error - contact devtools@ for details. Suite build deps: [MOLU2UhSCl_1hh7wm0gWZw {'project_path': 'ydb/tests/functional/tenants', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium, name=py3test] (uid=rnd-6xos6lyyov7qogim): Infrastructure error - contact devtools@ for details. Suite build deps: [Klgt4e1hSOwrr0Agi_1aIg {'project_path': 'ydb/tests/stress/oltp_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [MWNTb0DJZ3Db0AxQmgdlwA {'project_path': 'ydb/tests/functional/tpc/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c5X-ZXks9Bgum8h-pHYZ0w {'project_path': 'ydb/tests/stress/simple_queue', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium/tpch, name=py3test] (uid=rnd-pcanzt2nh8us21al): Infrastructure error - contact devtools@ for details. 
Suite build deps: [GO36W2XmzR_Kmd20m9v00A {'project_path': 'ydb/tests/functional/tpc/medium/tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Klgt4e1hSOwrr0Agi_1aIg {'project_path': 'ydb/tests/stress/oltp_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c5X-ZXks9Bgum8h-pHYZ0w {'project_path': 'ydb/tests/stress/simple_queue', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpcc, name=py3test] (uid=rnd-t0hx2pyhg9lx35qw): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [U7kcVEsSE24IV79WBrBe5w {'project_path': 'ydb/tests/functional/tpcc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ttl, name=py3test] (uid=rnd-78s0m7mizs86crbf): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hbb7HF1k80L6R3x9eQeB2g {'project_path': 'ydb/tests/functional/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/wardens, name=py3test] (uid=rnd-fta4qsrjhpg8usuz): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ea7gYS5uSNeGI7tDhZkSMA {'project_path': 'ydb/tests/functional/wardens', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=py3test] (uid=rnd-bfafs5dvynod6rnv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [beCT0pUSye70wN6RLyj-Yg {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/library/ut, name=py3test] (uid=rnd-nk1xwu4if0r86l34): Infrastructure error - contact devtools@ for details. Suite build deps: [8_y_hajYWx5OQhm2WxBpbA {'project_path': 'ydb/tests/library/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap, name=py3test] (uid=rnd-l7lfx29kya5nkpl7): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mfPkTABU6NKX9zEJ57znyw {'project_path': 'ydb/tests/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/column_family/compression, name=py3test] (uid=rnd-h7m8f8kc1xqdla8l): Infrastructure error - contact devtools@ for details. Suite build deps: [Kt_15SOv0oJGbrSawHiEcA {'project_path': 'ydb/tests/olap/column_family/compression', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/data_quotas, name=py3test] (uid=rnd-zh1b5qv78dze4zdp): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ea9H0VWTcBFsaNfkLDeR0w {'project_path': 'ydb/tests/olap/data_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/delete, name=py3test] (uid=rnd-q9q1pp3g4i7m3le7): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_2tsQ57TV1xFh7WRY3VE2w {'project_path': 'ydb/tests/olap/delete', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/oom, name=py3test] (uid=rnd-jcewd9henod4qvym): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zVHX800X9O4pqkjL3iTuCA {'project_path': 'ydb/tests/olap/oom', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/olap/s3_import, name=py3test] (uid=rnd-oiwkevnqm01ac7op): Infrastructure error - contact devtools@ for details. Suite build deps: [5E5Zm-obJy6m7FJ5rf4Vkg {'project_path': 'ydb/tests/olap/s3_import', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/scenario, name=py3test] (uid=rnd-2efb7qv9gv3a9cnb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [2g69VCEFyaaNT7khUnGTog {'project_path': 'ydb/tests/olap/scenario', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/ttl_tiering, name=py3test] (uid=rnd-9ruahl27xlqfbg8r): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YO-RvKNsFgUB_Y77oZGqJQ {'project_path': 'ydb/tests/olap/ttl_tiering', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/postgres_integrations/go-libpq, name=py3test] (uid=rnd-tqpcvt4x5kiz5htr): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hfuXKVfGASlcANT199zZHw {'project_path': 'ydb/tests/postgres_integrations/go-libpq', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/solomon/reading, name=py3test] (uid=rnd-o2a94a06dgaxm8tc): Infrastructure error - contact devtools@ for details. Suite build deps: [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RKfch0lrz383HLQVfsNWXA {'project_path': 'ydb/tests/solomon/reading', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/sql, name=py3test] (uid=rnd-nzpwt8mqz83rlf39): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [UymIl6-7mX85mCkUER5Aeg {'project_path': 'ydb/tests/sql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/cdc/tests, name=py3test] (uid=rnd-xuygs3pbva466iu9): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bSBDjvXYkge2gpJqmRMEjQ {'project_path': 'ydb/tests/stress/cdc/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sDclqZdoPP6j9UWkhnAL4Q {'project_path': 'ydb/tests/stress/cdc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/ctas/tests, name=py3test] (uid=rnd-npvzbfz1o4sgohx8): Infrastructure error - contact devtools@ for details. Suite build deps: [AetZSfNZ9CMILmh5FEuoRg {'project_path': 'ydb/tests/stress/ctas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [iFwrj7e8-S-G52jioqHVnQ {'project_path': 'ydb/tests/stress/ctas/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kafka/tests, name=py3test] (uid=rnd-yg1gko5jqurrykcx): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZeFksnG3zpQa68GiwFowhA {'project_path': 'ydb/tests/stress/kafka/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ikbQYbWhERwPwNxWeJc_0A {'project_path': 'ydb/tests/stress/kafka', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kv/tests, name=py3test] (uid=rnd-nx9exem8jn8n9f60): Infrastructure error - contact devtools@ for details. 
Suite build deps: [L5_dPxUZuaQvMaa_YBK2fw {'project_path': 'ydb/tests/stress/kv', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [QMGF-kY3Et0hA9SuM2Fynw {'project_path': 'ydb/tests/stress/kv/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/log/tests, name=py3test] (uid=rnd-hjkg8ub5jakmnbmd): Infrastructure error - contact devtools@ for details. Suite build deps: [3jQfUN1v26zOkMZ69x4FlQ {'project_path': 'ydb/tests/stress/log', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Ab0rMJiyARnZ6JrZAJmheA {'project_path': 'ydb/tests/stress/log/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/mixedpy/tests, name=py3test] (uid=rnd-bx7nf3h10iemzguh): Infrastructure error - contact devtools@ for details. Suite build deps: [5LQ5TI4a4QtTmgfqlZw80w {'project_path': 'ydb/tests/stress/mixedpy/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [KC01rfqy1WlXdkxg82cjaA {'project_path': 'ydb/tests/stress/mixedpy', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/node_broker/tests, name=py3test] (uid=rnd-wcxrbumsiyuifyh6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [buu2lGedrbrv-cg3iduXWw {'project_path': 'ydb/tests/stress/node_broker/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z1AP_QLBfiua1S069grTmg {'project_path': 'ydb/tests/stress/node_broker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/stress/olap_workload/tests, name=py3test] (uid=rnd-e3x88m4s0040zvcp): Infrastructure error - contact devtools@ for details. Suite build deps: [1rzh3WqZWGj_x01E123SqQ {'project_path': 'ydb/tests/stress/olap_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EdEbyk08XI49xwFMo65BKQ {'project_path': 'ydb/tests/stress/olap_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/oltp_workload/tests, name=py3test] (uid=rnd-q39eo9e2ui7jvurs): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [h9wh5qE0vDGZd_cY4cb3Aw {'project_path': 'ydb/tests/stress/oltp_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/reconfig_state_storage_workload/tests, name=py3test] (uid=rnd-rstpaivrgyzt0cfi): Infrastructure error - contact devtools@ for details. Suite build deps: [KlOxmrsPOds-L2Cfw05nLA {'project_path': 'ydb/tests/stress/reconfig_state_storage_workload', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cR5TVmKUFGLl41mv8wRu6A {'project_path': 'ydb/tests/stress/reconfig_state_storage_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/s3_backups/tests, name=py3test] (uid=rnd-2ogh285njroeqekn): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7EZZeYHdmBvGAgnMPGMniQ {'project_path': 'ydb/tests/stress/s3_backups/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u4iQX4gRLubjamX2SB7hkw {'project_path': 'ydb/tests/stress/s3_backups', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/stress/scheme_board/pile_promotion/tests, name=py3test] (uid=rnd-ajl54r4a1owc9eu7): Infrastructure error - contact devtools@ for details. Suite build deps: [-oGy1y7cJvhEvjF6QpbCqA {'project_path': 'ydb/tests/stress/scheme_board/pile_promotion', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [S9IVLNAunqYvZ3GrEK2ieg {'project_path': 'ydb/tests/stress/scheme_board/pile_promotion/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/show_create/view/tests, name=py3test] (uid=rnd-6xo6xb3gidpogmd8): Infrastructure error - contact devtools@ for details. Suite build deps: [5RvnpEFbCIiVFC7gtFnBmw {'project_path': 'ydb/tests/stress/show_create/view/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [an659eRe9hftMpi2acAyKQ {'project_path': 'ydb/tests/stress/show_create/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/simple_queue/tests, name=py3test] (uid=rnd-hniwjqt28p84rziv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [SHtbvTw9gYtsm_YhF2YswA {'project_path': 'ydb/tests/stress/simple_queue/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/topic/tests, name=py3test] (uid=rnd-iaiccarpe40fbr4e): Infrastructure error - contact devtools@ for details. Suite build deps: [-ZehxnvMT1V3FVYdVlICsQ {'project_path': 'ydb/tests/stress/topic/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [8_7HrEMR6eCiWfBbutNH3A {'project_path': 'ydb/tests/stress/topic', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/topic_kafka/tests, name=py3test] (uid=rnd-vr15cko0rpic7kme): Infrastructure error - contact devtools@ for details. Suite build deps: [HDnAnOkjtQVugl94ChIaMg {'project_path': 'ydb/tests/stress/topic_kafka/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [X1ZOPbuqQ9CDONHyxiyk2Q {'project_path': 'ydb/tests/stress/topic_kafka', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/transfer/tests, name=py3test] (uid=rnd-ychwx92fsq5x7m5i): Infrastructure error - contact devtools@ for details. Suite build deps: [6C5Ro_rt_dQsvdgCq6stCg {'project_path': 'ydb/tests/stress/transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [foRukMRbRdQRKIDDDfLmrw {'project_path': 'ydb/tests/stress/transfer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/kqprun/tests, name=py3test] (uid=rnd-ppkp176oexml43lc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-oa6qLc0UWliscp4Bygi8A {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [FY97zHmR3ruzzAgzN9uH6A {'project_path': 'ydb/tests/tools/kqprun/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [GB2ieZj8CxlhtaSmSU54uQ {'project_path': 'ydb/tests/tools/kqprun/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/ut, name=py3test] (uid=rnd-axzc7y06o06nc7dz): Infrastructure error - contact devtools@ for details. Suite build deps: [EaVcg6KtmBQwAYVQDNEtNw {'project_path': 'ydb/tests/tools/nemesis/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/pq_read/test, name=py3test] (uid=rnd-4r1fy90mwx95kx2d): Infrastructure error - contact devtools@ for details. Suite build deps: [9w8SIosdmEdYg_0llg3dzQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zVb8FYjdwyfXN4FffNjulQ {'project_path': 'ydb/tests/tools/pq_read/test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part0, name=pytest] (uid=rnd-fywi8uh7hvgv7v5k): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WqG84sxUxmwfN5wS1cXmHw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part1, name=pytest] (uid=rnd-k4ir96ochodun0u8): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [l9gz16nAgAHG2cNiw38qPg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part10, name=pytest] (uid=rnd-gv31xx65eowag89z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i9kYz9V2sLtWfb0Q-rekoA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part11, name=pytest] (uid=rnd-svp4e46guj08s3nd): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [RdhZQKNREGuPCIbfJ5uQUA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part12, name=pytest] (uid=rnd-pek5jfvjyt8obua5): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c2xft3o7C_mD78w7clzNSA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part13, name=pytest] (uid=rnd-surmj0ywu9ccpuha): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [FL-Dq1YbRD5DpLwfkeuttw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part14, name=pytest] (uid=rnd-q4netjxzvlk4tevs): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fdhRZqWF-n1ohWd4NSYCtg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part15, name=pytest] (uid=rnd-pzc3bbj6bni7hjj4): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hFlwFQlcKfCdM_JPhRmpig {'project_path': 'ydb/library/yql/tests/sql/dq_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part16, name=pytest] (uid=rnd-7voy0zo9vjop4pdq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_ldtsIYrOYePLnj0xiqG-Q {'project_path': 'ydb/library/yql/tests/sql/dq_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part17, name=pytest] (uid=rnd-48raqqj5grtl07tb): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YEwPpvo7BuR53kwwrUvdzw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part18, name=pytest] (uid=rnd-91g88i7qcq89s3s3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [4SaXQwK3tipRRQR31kr7qw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part19, name=pytest] (uid=rnd-ca1vt0p8vsw53hfk): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eYdOfHnRZW_MsImFH8CDQg {'project_path': 'ydb/library/yql/tests/sql/dq_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part2, name=pytest] (uid=rnd-n2p69c94pow4mt88): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cd6MrAMNe5G-PW1KAmHDtA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part3, name=pytest] (uid=rnd-nf4upi2ln8nu2fer): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ppJd-cpOQ_HDk4-ftoZ4Uw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part4, name=pytest] (uid=rnd-ose4fl76c4fzbo8q): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VOg41eTnFnquCXOkzk_06A {'project_path': 'ydb/library/yql/tests/sql/dq_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part5, name=pytest] (uid=rnd-4cnx67se7dfulc9m): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [EZdEs3bCupXvK4pDZMdYfQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part6, name=pytest] (uid=rnd-dt1ynckrik0iky40): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fd-eee-ef-8PIV1-rO29Aw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part7, name=pytest] (uid=rnd-ew8fgamcqth8xgdw): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_yE54j4lJCRgctsUJ6yIag {'project_path': 'ydb/library/yql/tests/sql/dq_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part8, name=pytest] (uid=rnd-a6npyttpuirskoiv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [PPRWuiTyzSwbRSgLstunFQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part9, name=pytest] (uid=rnd-79ze7w8jfcyhwcku): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [MkZEOWFrq_Cw0twlYPnATw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part0, name=pytest] (uid=rnd-i0huhxbile4k03t7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [OTTBxZGYz2Xaa7SPvAUPWw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part1, name=pytest] (uid=rnd-54bc3an1taf415nr): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [QQW8zBZT3p3JzbZ5bFPECA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part10, name=pytest] (uid=rnd-sdmzpxdhtpz3nt9d): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [8wQTyN_n0vyPIEPZhM2Ljg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part2, name=pytest] (uid=rnd-g29x5vath783u6k0): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [7FtCiB4Nm4YTgSzvoeMj1g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part3, name=pytest] (uid=rnd-ahjvipvx7fnd33pc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aexmIkZQAE_NsaIb7swZzQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part4, name=pytest] (uid=rnd-7vhxy0z0zq3wmxyz): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [EzJCVI1uLOb07_0AQInQBg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part5, name=pytest] (uid=rnd-x7lht7ph3c424kfc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [TQ0idwAoSn0mI29Ondh_wg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part6, name=pytest] (uid=rnd-52p9d45zdk3r2h2k): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [GdUjNVGJJ4A5BQMLhasiEA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part7, name=pytest] (uid=rnd-d3114nlds5vavg60): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [O2SXASj22gKDRWJBF3Z00g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part8, name=pytest] (uid=rnd-q0cv0ahgcfi71azf): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VqodKfnOtHeiBTdD_8iHgQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part9, name=pytest] (uid=rnd-363maqtki0amxddj): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Xrpgg0yhlehTvVFrnQ3AvA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lBCQX0PVpAnlcC0t65UL-w {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oeQ4QxP-znnAPywYi258EQ {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/solomon, name=pytest] (uid=rnd-jlvlbonnvbm452kd): Infrastructure error - contact devtools@ for details. Suite build deps: [3s3JWLpIFxDCd8RLTz88dw {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VY2HtEC5_gANfiTekPfyBg {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [f6ZW54AW4efDxiMcwphzSw {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tiFyM4wcAZ7tz-Yt4dS_Aw {'project_path': 'ydb/library/yql/tests/sql/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/apps/etcd_proxy/service/ut, name=unittest] (uid=rnd-xo1btm05jeamd6y3): Infrastructure error - contact devtools@ for details. Suite build deps: [6n6HCQaxCmjvFz9N2j5jPg {'project_path': 'ydb/apps/etcd_proxy/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/apps/ydb/ut, name=unittest] (uid=rnd-u17ziwjpmj413qjq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [j-uqixGAAtQ-xB1Ut7_Nkg {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wUwnk-hppPHLVOX0-ppFwg {'project_path': 'ydb/apps/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/actorlib_impl/ut, name=unittest] (uid=rnd-kl38z01ytzwjn6lp): Infrastructure error - contact devtools@ for details. Suite build deps: [dNa7edmUw1SSz5SmqYiblQ {'project_path': 'ydb/core/actorlib_impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_local_partition_reader, name=unittest] (uid=rnd-wflvqh8v0b0m1x5k): Infrastructure error - contact devtools@ for details. Suite build deps: [desMz-8AnHIPBZmLNYpg8Q {'project_path': 'ydb/core/backup/impl/ut_local_partition_reader', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_table_writer, name=unittest] (uid=rnd-kr28pjhdciqts6zt): Infrastructure error - contact devtools@ for details. Suite build deps: [gwQ-TSevxSCstvTqosC-ZA {'project_path': 'ydb/core/backup/impl/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/backpressure/ut_client, name=unittest] (uid=rnd-md99zge7qjcqnue4): Infrastructure error - contact devtools@ for details. Suite build deps: [J77MJcmbyM1_iV7l4Ywkqw {'project_path': 'ydb/core/blobstorage/backpressure/ut_client', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut, name=unittest] (uid=rnd-h85t75ebjc5jdwkp): Infrastructure error - contact devtools@ for details. Suite build deps: [lFVv07i6eFByo3CPopRe1w {'project_path': 'ydb/core/blobstorage/dsproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_fat, name=unittest] (uid=rnd-3t933cqnsdoognpt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [_L1T-Skeaa1MsdjMI6JhCA {'project_path': 'ydb/core/blobstorage/dsproxy/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_ftol, name=unittest] (uid=rnd-l51shlxgr6sop4g4): Infrastructure error - contact devtools@ for details. Suite build deps: [GiwuoO1Qq1XvCnh1sd4ZEw {'project_path': 'ydb/core/blobstorage/dsproxy/ut_ftol', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut, name=unittest] (uid=rnd-u94uwj8zvcooxvji): Infrastructure error - contact devtools@ for details. Suite build deps: [Q840sQzpCZgGRB5ANVJREA {'project_path': 'ydb/core/blobstorage/nodewarden/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut_sequence, name=unittest] (uid=rnd-xgq0nlt4plo6bxct): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yrnE9w7Oqja0VWwnaHXSjA {'project_path': 'ydb/core/blobstorage/nodewarden/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/pdisk/ut, name=unittest] (uid=rnd-stc6ag3h86e9gxpd): Infrastructure error - contact devtools@ for details. Suite build deps: [OMvI4sPlzFciW41OUgyoVQ {'project_path': 'ydb/core/blobstorage/pdisk/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/storagepoolmon/ut, name=unittest] (uid=rnd-pgycxeugl5ycfq8b): Infrastructure error - contact devtools@ for details. Suite build deps: [3Io6FZWQ7ygJikfVx7FlQA {'project_path': 'ydb/core/blobstorage/storagepoolmon/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage, name=unittest] (uid=rnd-xqu98yt42268onlo): Infrastructure error - contact devtools@ for details. Suite build deps: [5-pDglIbWbQzXaby7xu_9g {'project_path': 'ydb/core/blobstorage/ut_blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_balancing, name=unittest] (uid=rnd-m63f9wzelgq0t9xb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xB7P38sJ6S8ryPS1RtIpOw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_blob_depot, name=unittest] (uid=rnd-fomnhq7fieybfn8j): Infrastructure error - contact devtools@ for details. Suite build deps: [e0nMVbgrBNJlkIQiBfiFNg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_blob_depot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_bridge, name=unittest] (uid=rnd-6qtyn2ncb6j52qhq): Infrastructure error - contact devtools@ for details. Suite build deps: [3yCnpHVFlLtOBzHyIFvjVw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_bridge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_check_integrity, name=unittest] (uid=rnd-p04j4dyn6w2xk7kr): Infrastructure error - contact devtools@ for details. Suite build deps: [jRsrFtIn8ZctxVCegT8qcw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_check_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing, name=unittest] (uid=rnd-5fp3h4ki7cc9nemz): Infrastructure error - contact devtools@ for details. Suite build deps: [Gdk6rlnYl1v9ELYEbaLdTg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_donor, name=unittest] (uid=rnd-hbxselw7kipjr6af): Infrastructure error - contact devtools@ for details. Suite build deps: [mrURmxo8ElA6mlWtkAQ56g {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_donor', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_huge, name=unittest] (uid=rnd-wuhj4xqskd25ua91): Infrastructure error - contact devtools@ for details. Suite build deps: [2y7CBFYlcs4k0mlZ1n3BEw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_huge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk, name=unittest] (uid=rnd-w78sehu5mzok32jf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4gO7d3FPQ5Ce2SwR0UsKrA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_statestorage, name=unittest] (uid=rnd-vdfb1xsk5x8lodc8): Infrastructure error - contact devtools@ for details. Suite build deps: [Bw49BkMTbOWRxpVoCTi6kg {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_statestorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk, name=unittest] (uid=rnd-63k9q194ak3wqltw): Infrastructure error - contact devtools@ for details. Suite build deps: [7syP3bbSPbLGdbjOgE_I7Q {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart, name=unittest] (uid=rnd-4fd5fifpmz5v6o29): Infrastructure error - contact devtools@ for details. Suite build deps: [krVoK3UZ96KD2LCqqWbCnA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_mirror3of4, name=unittest] (uid=rnd-i5lml2wsnz4dvnr4): Infrastructure error - contact devtools@ for details. Suite build deps: [VohY1r2GQeTHf3HjJ3qVBw {'project_path': 'ydb/core/blobstorage/ut_mirror3of4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_testshard, name=unittest] (uid=rnd-lvq9qbeqliew5iun): Infrastructure error - contact devtools@ for details. Suite build deps: [cqamkaaw53ZT1_ik6HQ4oA {'project_path': 'ydb/core/blobstorage/ut_testshard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk, name=unittest] (uid=rnd-fvbqrfhx51sd8j6d): Infrastructure error - contact devtools@ for details. Suite build deps: [YwSTwtKTQ5qcEZ6R0Nll3Q {'project_path': 'ydb/core/blobstorage/ut_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk2, name=unittest] (uid=rnd-h1aogzzp3gzrljed): Infrastructure error - contact devtools@ for details. 
Suite build deps: [lGZpaJdCQ8UGpOANZcu-Gg {'project_path': 'ydb/core/blobstorage/ut_vdisk2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/skeleton/ut, name=unittest] (uid=rnd-mnwptfxnuq3o501p): Infrastructure error - contact devtools@ for details. Suite build deps: [3x-uuaD47atpDMU36TNgbg {'project_path': 'ydb/core/blobstorage/vdisk/skeleton/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/synclog/ut, name=unittest] (uid=rnd-fef5qjcqprjk7o5c): Infrastructure error - contact devtools@ for details. Suite build deps: [WazADWxQ-7A-noYokaagpg {'project_path': 'ydb/core/blobstorage/vdisk/synclog/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/minikql_compile/ut, name=unittest] (uid=rnd-2qq9agi9iviggylk): Infrastructure error - contact devtools@ for details. Suite build deps: [oyDDPFxYkLUmoG4gqndHlA {'project_path': 'ydb/core/client/minikql_compile/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/ut, name=unittest] (uid=rnd-fzm6qe4vowmt0ffm): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [6RslIKeiyKcgbTHK0HO1ZQ {'project_path': 'ydb/core/client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/ut, name=unittest] (uid=rnd-cuieqbc9minmcj9c): Infrastructure error - contact devtools@ for details. Suite build deps: [di8kWGkoWGzV3K3nVkLr8g {'project_path': 'ydb/core/cms/console/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut, name=unittest] (uid=rnd-n03uatttr5euo6vz): Infrastructure error - contact devtools@ for details. Suite build deps: [ITbF__cK-Q9D_owLCqZQdg {'project_path': 'ydb/core/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel, name=unittest] (uid=rnd-oc2jx0wwztfjryw6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [pApuhxdMH_72Q4IDCAKdQw {'project_path': 'ydb/core/cms/ut_sentinel', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel_unstable, name=unittest] (uid=rnd-qj8e973luhdwp51v): Infrastructure error - contact devtools@ for details. Suite build deps: [RF1zYngDSGRep8wqK3Hw3w {'project_path': 'ydb/core/cms/ut_sentinel_unstable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/control/ut, name=unittest] (uid=rnd-jtiwl8bi6qap3mkc): Infrastructure error - contact devtools@ for details. Suite build deps: [UYk3Ft8qJuQ7_G7JZ6mwxA {'project_path': 'ydb/core/control/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/engine/ut, name=unittest] (uid=rnd-b6e3ajxsoy3vk483): Infrastructure error - contact devtools@ for details. Suite build deps: [lKt0z0dM8jPrc2XymlYifQ {'project_path': 'ydb/core/engine/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/external_sources/s3/ut, name=unittest] (uid=rnd-vf8fbf62uyuv5wvi): Infrastructure error - contact devtools@ for details. Suite build deps: [4qEj448_Wx_nXqrxCENx8g {'project_path': 'ydb/core/external_sources/s3/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [HCBUIDcacx4RRnxJZbe-BA {'project_path': 'library/recipes/docker_compose', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yKApAgkaXzebFsu668Rswg {'project_path': 'library/recipes/docker_compose/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/fq/libs/checkpoint_storage/ut, name=unittest] (uid=rnd-oq5nb6cbtjxa60t1): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pFtT7DoruicHEF9LOEgMVg {'project_path': 'ydb/core/fq/libs/checkpoint_storage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpointing/ut, name=unittest] (uid=rnd-8lo0tnue81y36ki9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qzY3lI-LZg94L4BOlP93Bg {'project_path': 'ydb/core/fq/libs/checkpointing/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/common/ut, name=unittest] (uid=rnd-lykj3xsju52zkp2s): Infrastructure error - contact devtools@ for details. Suite build deps: [3BkOtDWXn0hQLQJJ8QoIrA {'project_path': 'ydb/core/fq/libs/common/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/control_plane_proxy/ut, name=unittest] (uid=rnd-r3m39lf3quismpn1): Infrastructure error - contact devtools@ for details. Suite build deps: [KLUdE-_QHSg641dOXSGHLg {'project_path': 'ydb/core/fq/libs/control_plane_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/result_formatter/ut, name=unittest] (uid=rnd-pllrp684bobwgoka): Infrastructure error - contact devtools@ for details. Suite build deps: [6SEW03t25bzw5YblbAjTLw {'project_path': 'ydb/core/fq/libs/result_formatter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/format_handler/ut, name=unittest] (uid=rnd-8xr58dwko40t5aeg): Infrastructure error - contact devtools@ for details. Suite build deps: [c2bH_8NXmAtG264K6PckXA {'project_path': 'ydb/core/fq/libs/row_dispatcher/format_handler/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/ut, name=unittest] (uid=rnd-aatv1y9n4onzjumx): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tO1R3p3N1yDJIPlNsZFUkg {'project_path': 'ydb/core/fq/libs/row_dispatcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/test_connection/ut, name=unittest] (uid=rnd-mmoqs5e4r3jg5stt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bX0OYY_FUoZdfAEu1gf7uA {'project_path': 'ydb/core/fq/libs/test_connection/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/ydb/ut, name=unittest] (uid=rnd-95pnswnfr9i1sy83): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nr-JoX2NmhNJBSQEo0j7AQ {'project_path': 'ydb/core/fq/libs/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/shard/ut, name=unittest] (uid=rnd-bunfzjkbf8ck2rro): Infrastructure error - contact devtools@ for details. Suite build deps: [R4zFh6NIYgzFJNMYxGcBVg {'project_path': 'ydb/core/graph/shard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/ut, name=unittest] (uid=rnd-j1cqozg5wpt8onex): Infrastructure error - contact devtools@ for details. Suite build deps: [Y_J2ejkBz-kxhlgPmQokZg {'project_path': 'ydb/core/graph/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/grpc_request_check_actor_ut, name=unittest] (uid=rnd-uen6edlheskgx2yw): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xOHq4GF2TdQymHT1yqiNCg {'project_path': 'ydb/core/grpc_services/grpc_request_check_actor_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/grpc_services/tablet/ut, name=unittest] (uid=rnd-wrmex9ioa7rzo7hy): Infrastructure error - contact devtools@ for details. Suite build deps: [T1h0RX--1i5b6gHZDUQ2-g {'project_path': 'ydb/core/grpc_services/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/ut, name=unittest] (uid=rnd-j10j6fm4k7ttdrof): Infrastructure error - contact devtools@ for details. 
Suite build deps: [kO3GS5K09mVxOzJllfZn7Q {'project_path': 'ydb/core/grpc_services/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_streaming/ut, name=unittest] (uid=rnd-6ceduyzc7rq1uslj): Infrastructure error - contact devtools@ for details. Suite build deps: [DTDar6h-iSSbtnu89OsO7w {'project_path': 'ydb/core/grpc_streaming/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/health_check/ut, name=unittest] (uid=rnd-7o8y41g4qh30gyps): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vhmq966BMUwTUKkpG3fRFQ {'project_path': 'ydb/core/health_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/http_proxy/ut, name=unittest] (uid=rnd-zh8cfzdvo942ah7b): Infrastructure error - contact devtools@ for details. Suite build deps: [SeoZhL_RMyaC_cpK4FjCdA {'project_path': 'ydb/core/http_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kafka_proxy/ut, name=unittest] (uid=rnd-kmdoj84wlzh2ipte): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tpoVF2M2g3f86xtfHgvEDA {'project_path': 'ydb/core/kafka_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kesus/proxy/ut, name=unittest] (uid=rnd-dey6a4bm64awvrd9): Infrastructure error - contact devtools@ for details. Suite build deps: [Ap_T5I8ahA9ejABnnuiXtw {'project_path': 'ydb/core/kesus/proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kesus/tablet/ut, name=unittest] (uid=rnd-23d4i5dwvt3vf9iz): Infrastructure error - contact devtools@ for details. Suite build deps: [IXZuYuD7QsuseNIWyENiDg {'project_path': 'ydb/core/kesus/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut, name=unittest] (uid=rnd-d7ny8h3gvxo0cwhv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dxkbTJc5KEqtjkvhAvkPlQ {'project_path': 'ydb/core/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut_trace, name=unittest] (uid=rnd-fycqt7ftegtcxoz1): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [y5vitdGOmUvA9qEU0GAFUQ {'project_path': 'ydb/core/keyvalue/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/executer_actor/ut, name=unittest] (uid=rnd-mttanbxog2btje01): Infrastructure error - contact devtools@ for details. Suite build deps: [csvXMMmx2LljrfT5BCKHgQ {'project_path': 'ydb/core/kqp/executer_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/federated_query/ut_service, name=unittest] (uid=rnd-3hvugl83a91atdao): Infrastructure error - contact devtools@ for details. Suite build deps: [XJjHl15zWm3n_REMsCQfzg {'project_path': 'ydb/core/kqp/federated_query/ut_service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/provider/ut, name=unittest] (uid=rnd-o8v5h5wj3iuif1hk): Infrastructure error - contact devtools@ for details. Suite build deps: [TeQBfEfPcDvTTtvXzXJ4Cw {'project_path': 'ydb/core/kqp/provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/rm_service/ut, name=unittest] (uid=rnd-3svak9ryit2t1gg8): Infrastructure error - contact devtools@ for details. Suite build deps: [Pcj3FeLZMCfq6X4fj9B5LA {'project_path': 'ydb/core/kqp/rm_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/runtime/ut, name=unittest] (uid=rnd-oppvqdp501a9p0rx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z_c_z6AlCEY8jWQ7oAqUJw {'project_path': 'ydb/core/kqp/runtime/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/tests/kikimr_tpch, name=unittest] (uid=rnd-z0uajrkefq1apvxt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [5NO0BtQNkc4BqHeFbFV7EA {'project_path': 'yql/essentials/udfs/common/pire', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [I4e-bB8_UrdaYRaEXkVcow {'project_path': 'yql/essentials/udfs/common/string', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [MZ4m6v9tSorHZ9Xg-PtqGw {'project_path': 'yql/essentials/udfs/common/datetime2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Rn5vf2entABGIoBD0m-QuQ {'project_path': 'ydb/core/kqp/tests/kikimr_tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_T3UlkRa1h0lykmUS4n4dg {'project_path': 'yql/essentials/udfs/common/re2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xnyDUDZe-hlMDeCcyiyt7w {'project_path': 'ydb/library/yql/udfs/common/datetime', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/core/kqp/ut/arrow, name=unittest] (uid=rnd-05bn5jy0imhyn1n6): Infrastructure error - contact devtools@ for details. Suite build deps: [IKkX2o8hZ2d_1qwlT4ktkA {'project_path': 'ydb/core/kqp/ut/arrow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/batch_operations, name=unittest] (uid=rnd-9rfvdhtxz95f8t8q): Infrastructure error - contact devtools@ for details. Suite build deps: [Lfc5mIisfgnbw3AqFP4l3A {'project_path': 'ydb/core/kqp/ut/batch_operations', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/cost, name=unittest] (uid=rnd-xwusrcl0lokzsseu): Infrastructure error - contact devtools@ for details. Suite build deps: [KyDMEwhuWXlVrzKPg7EvTg {'project_path': 'ydb/core/kqp/ut/cost', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data, name=unittest] (uid=rnd-b1cc1ejczggxnax7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MCvjPHdUmiDt4VSEtJVCwA {'project_path': 'ydb/core/kqp/ut/data', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data_integrity, name=unittest] (uid=rnd-orl0w6dnzt9chaeo): Infrastructure error - contact devtools@ for details. Suite build deps: [9SW9xO3KBk51B8gR-r3pkA {'project_path': 'ydb/core/kqp/ut/data_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/discovery, name=unittest] (uid=rnd-ql3df0y98ndcgdgo): Infrastructure error - contact devtools@ for details. Suite build deps: [W5Nujkm_R7jeIz9iKT5dAg {'project_path': 'ydb/core/kqp/ut/discovery', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/effects, name=unittest] (uid=rnd-10i98dyrve11y1ir): Infrastructure error - contact devtools@ for details. Suite build deps: [Ro4meXyAzLAsDfinpqDt5w {'project_path': 'ydb/core/kqp/ut/effects', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/datastreams, name=unittest] (uid=rnd-hvhlu26mlvetcgms): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cRVW0jkMW0FoEUHCpmh6Iw {'project_path': 'ydb/core/kqp/ut/federated_query/datastreams', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/generic_ut, name=unittest] (uid=rnd-618dueq9qkc4wcrb): Infrastructure error - contact devtools@ for details. Suite build deps: [__QaTk8sigBpQSYjJrh3Ng {'project_path': 'ydb/core/kqp/ut/federated_query/generic_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/large_results, name=unittest] (uid=rnd-8373w0dh2br1yxh2): Infrastructure error - contact devtools@ for details. 
Suite build deps: [KZBm5jzKkLpB1A4yEYbYXg {'project_path': 'ydb/core/kqp/ut/federated_query/large_results', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/s3, name=unittest] (uid=rnd-xoyfnxok72spyrkq): Infrastructure error - contact devtools@ for details. Suite build deps: [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nJUEZrWFslD--Qygd6a_ig {'project_path': 'ydb/core/kqp/ut/federated_query/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/idx_test, name=unittest] (uid=rnd-wm97tkmxga8zzai3): Infrastructure error - contact devtools@ for details. Suite build deps: [W7lR72m4DBoLBDUra6BcHQ {'project_path': 'ydb/core/kqp/ut/idx_test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/indexes, name=unittest] (uid=rnd-8a89axo2qomyzc7j): Infrastructure error - contact devtools@ for details. Suite build deps: [DNWlXeWkxrzQir4YqL5_MQ {'project_path': 'ydb/core/kqp/ut/indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/join, name=unittest] (uid=rnd-klkszh3y9xoku9gt): Infrastructure error - contact devtools@ for details. Suite build deps: [7FyG1Fa1x1ZSlEl8gK6PdA {'project_path': 'ydb/core/kqp/ut/join', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/olap, name=unittest] (uid=rnd-83zsgzfgroar0eq5): Infrastructure error - contact devtools@ for details. Suite build deps: [KUMLBflF1XT47QyeCEOaTQ {'project_path': 'ydb/core/kqp/ut/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/opt, name=unittest] (uid=rnd-d0of3rkd2v72qnfq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dr8BJDS5QzDtCr3vyHo2JQ {'project_path': 'ydb/core/kqp/ut/opt', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/perf, name=unittest] (uid=rnd-k4jlbmfxuuatezf6): Infrastructure error - contact devtools@ for details. Suite build deps: [rxivR5lKAznC3jKEcjIl0Q {'project_path': 'ydb/core/kqp/ut/perf', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/pg, name=unittest] (uid=rnd-hy6i43idcyqa52xs): Infrastructure error - contact devtools@ for details. Suite build deps: [-cX5SVbot8hB1b66u76gXQ {'project_path': 'ydb/core/kqp/ut/pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/query, name=unittest] (uid=rnd-vlyf44rxi057u22t): Infrastructure error - contact devtools@ for details. Suite build deps: [TASBH2-uA4ZTYV-Q2wQmbg {'project_path': 'ydb/core/kqp/ut/query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/runtime, name=unittest] (uid=rnd-lsknt69yl9e0utvc): Infrastructure error - contact devtools@ for details. Suite build deps: [73N7OgVpg_v-vQE6eXrWaA {'project_path': 'ydb/core/kqp/ut/runtime', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scan, name=unittest] (uid=rnd-l6deh0rli1egczzb): Infrastructure error - contact devtools@ for details. Suite build deps: [nzNVRofrXkamgK590oNRrw {'project_path': 'ydb/core/kqp/ut/scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scheme, name=unittest] (uid=rnd-nmk0hulwgz2vfioq): Infrastructure error - contact devtools@ for details. Suite build deps: [hlIdGWt8kJBPY2PQJCqG7Q {'project_path': 'ydb/core/kqp/ut/scheme', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/service, name=unittest] (uid=rnd-oyskp8zhp7e4hsjo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [DP6sXHWF5DiSjlwIq2dr8Q {'project_path': 'ydb/core/kqp/ut/service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/sysview, name=unittest] (uid=rnd-1i2lwlz5m1xv8tjq): Infrastructure error - contact devtools@ for details. Suite build deps: [hgrO8CNpZ4VyHJR_q8sXdw {'project_path': 'ydb/core/kqp/ut/sysview', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/tx, name=unittest] (uid=rnd-5k122ob5euyoyky1): Infrastructure error - contact devtools@ for details. Suite build deps: [MpKC41R15ISDPv5h51ZY0A {'project_path': 'ydb/core/kqp/ut/tx', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/view, name=unittest] (uid=rnd-ybiwkwysqaq2illr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vyn8KJsG23lYprz_jHl5nw {'project_path': 'ydb/core/kqp/ut/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/yql, name=unittest] (uid=rnd-vqhoxbgkrrnet0z7): Infrastructure error - contact devtools@ for details. Suite build deps: [nN7NU90M2EY8qlfUhiHLVw {'project_path': 'ydb/core/kqp/ut/yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/workload_service/ut, name=unittest] (uid=rnd-umix4ni7jpa6omot): Infrastructure error - contact devtools@ for details. Suite build deps: [96xGlMzbi8YK819ldDiC8g {'project_path': 'ydb/core/kqp/workload_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/load_test/ut, name=unittest] (uid=rnd-1jnav3uv6yt9axq5): Infrastructure error - contact devtools@ for details. Suite build deps: [ETNiFapPwne4jlnXm_kzYg {'project_path': 'ydb/core/load_test/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/memory_controller/ut, name=unittest] (uid=rnd-h1tqe8c5gway6dve): Infrastructure error - contact devtools@ for details. 
Suite build deps: [7PeKWQPC9OEWNe7-gtML9Q {'project_path': 'ydb/core/memory_controller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/address_classification/ut, name=unittest] (uid=rnd-wstmmkt4k4tmq0vh): Infrastructure error - contact devtools@ for details. Suite build deps: [P85L5nZFnkC3WnicjJR42A {'project_path': 'ydb/core/mind/address_classification/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut, name=unittest] (uid=rnd-lyrmhlajdreh7kw4): Infrastructure error - contact devtools@ for details. Suite build deps: [CJ53u2dKW-njZV5g5NrW4A {'project_path': 'ydb/core/mind/bscontroller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut_bscontroller, name=unittest] (uid=rnd-5i721n71q492258g): Infrastructure error - contact devtools@ for details. Suite build deps: [aEtZsMr2DhUluPTLsaCO-Q {'project_path': 'ydb/core/mind/bscontroller/ut_bscontroller', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/bscontroller/ut_selfheal, name=unittest] (uid=rnd-97j3v461971p8u4u): Infrastructure error - contact devtools@ for details. Suite build deps: [8HVjDd453VcGXjBcqvwo5w {'project_path': 'ydb/core/mind/bscontroller/ut_selfheal', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/hive/ut, name=unittest] (uid=rnd-6mlfj96y7wbxhxt6): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xFfcW_FEN7QAi8B89g9mbQ {'project_path': 'ydb/core/mind/hive/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/ut, name=unittest] (uid=rnd-lwi2exqbowwtexd0): Infrastructure error - contact devtools@ for details. Suite build deps: [EpDqvKdKqx1fi_gWm3_ajQ {'project_path': 'ydb/core/mind/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut_fat, name=unittest] (uid=rnd-4uhjfiv5mcwvqvop): Infrastructure error - contact devtools@ for details. 
Suite build deps: [j34JPDFlXbGA-ummBhxlqw {'project_path': 'ydb/core/mind/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mon/ut, name=unittest] (uid=rnd-gh2japdrgb38n61p): Infrastructure error - contact devtools@ for details. Suite build deps: [qi-RkrhUQTyk4ZDqyt0AeA {'project_path': 'ydb/core/mon/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/dread_cache_service/ut, name=unittest] (uid=rnd-xztk48qstmuttnex): Infrastructure error - contact devtools@ for details. Suite build deps: [mANhjC7zg18RtQDo6D8_FQ {'project_path': 'ydb/core/persqueue/dread_cache_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/pqtablet/cache/ut, name=unittest] (uid=rnd-0l46qqowk35917j1): Infrastructure error - contact devtools@ for details. Suite build deps: [m_Hm0Dv1_EOFV6p4TKV8kw {'project_path': 'ydb/core/persqueue/pqtablet/cache/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/public/fetcher/ut, name=unittest] (uid=rnd-lm6g2a4sbf3xz5nw): Infrastructure error - contact devtools@ for details. Suite build deps: [7gGqHdHgd6slRciTuz2h1w {'project_path': 'ydb/core/persqueue/public/fetcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/public/list_topics/ut, name=unittest] (uid=rnd-14h7ksp4lk8srsvy): Infrastructure error - contact devtools@ for details. Suite build deps: [MNKFhSKYk2e25o1dYTZiNA {'project_path': 'ydb/core/persqueue/public/list_topics/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut, name=unittest] (uid=rnd-8nt65n53ong4c30k): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z_6AV9mb2pXBX4Jxd4YISA {'project_path': 'ydb/core/persqueue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/persqueue/ut/slow, name=unittest] (uid=rnd-g1ftlemb4s16e9md): Infrastructure error - contact devtools@ for details. 
Suite build deps: [QEQjz6HplUp7LifG7NZChA {'project_path': 'ydb/core/persqueue/ut/slow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/ut_with_sdk, name=unittest] (uid=rnd-6lx5pn06ayokysca): Infrastructure error - contact devtools@ for details. Suite build deps: [kB1NyoWIQCymiRdk4hJpdQ {'project_path': 'ydb/core/persqueue/ut/ut_with_sdk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/public_http/ut, name=unittest] (uid=rnd-nssp53wcb2t6dwxr): Infrastructure error - contact devtools@ for details. Suite build deps: [_8aCNhNK3HdHPFhbJvsLDg {'project_path': 'ydb/core/public_http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/quoter/ut, name=unittest] (uid=rnd-09257ci3w3f4wp3g): Infrastructure error - contact devtools@ for details. Suite build deps: [Jp9DzpYxX8vW0bzAhiwdoA {'project_path': 'ydb/core/quoter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/raw_socket/ut, name=unittest] (uid=rnd-5fe1bm3058cqxnsl): Infrastructure error - contact devtools@ for details. Suite build deps: [B0KWrlg1KZZXxsHix7fUrQ {'project_path': 'ydb/core/raw_socket/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/certificate_check/ut, name=unittest] (uid=rnd-x4wjrivfrbq96ckh): Infrastructure error - contact devtools@ for details. Suite build deps: [kIpGmYnKNb7hVnjb3ELmlQ {'project_path': 'ydb/core/security/certificate_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ldap_auth_provider/ut, name=unittest] (uid=rnd-s1fipg7p4dmpil84): Infrastructure error - contact devtools@ for details. Suite build deps: [6Bxu96fIKin_AOD6Sn8dfw {'project_path': 'ydb/core/security/ldap_auth_provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ut, name=unittest] (uid=rnd-gkoevtgv4kn5r4ht): Infrastructure error - contact devtools@ for details. 
Suite build deps: [TuvJgrnqKiwzAnv0pUjkmw {'project_path': 'ydb/core/security/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/aggregator/ut, name=unittest] (uid=rnd-pz9sfa4t1ek4624s): Infrastructure error - contact devtools@ for details. Suite build deps: [aUx59MNeDfM-sxClwcguIg {'project_path': 'ydb/core/statistics/aggregator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/database/ut, name=unittest] (uid=rnd-5v0rb6uiybei0wef): Infrastructure error - contact devtools@ for details. Suite build deps: [gERE8Fxnk80bAHc0z3nToA {'project_path': 'ydb/core/statistics/database/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut, name=unittest] (uid=rnd-ifw4vu32plavu45o): Infrastructure error - contact devtools@ for details. Suite build deps: [9_5NtWFQDgxb8qKKxcjyog {'project_path': 'ydb/core/statistics/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut/ut_aggregation, name=unittest] (uid=rnd-8l89ofa5i8a27e8f): Infrastructure error - contact devtools@ for details. Suite build deps: [MjoXtqOt3G1Z6vXTNBiLjA {'project_path': 'ydb/core/statistics/service/ut/ut_aggregation', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/query_stats/ut, name=unittest] (uid=rnd-o6l2acolllpv8p84): Infrastructure error - contact devtools@ for details. Suite build deps: [p1JeLHqG3cXcv-sTC6UPOw {'project_path': 'ydb/core/sys_view/query_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/ut, name=unittest] (uid=rnd-06kjj2wan7ir1bj2): Infrastructure error - contact devtools@ for details. Suite build deps: [FVK0KkH4pGzxN4IYYtG02Q {'project_path': 'ydb/core/sys_view/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet/ut, name=unittest] (uid=rnd-ubi8hcngo30lqe5a): Infrastructure error - contact devtools@ for details. 
Suite build deps: [B4xuLxs7qiYBZeFCnAEAGg {'project_path': 'ydb/core/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut, name=unittest] (uid=rnd-vnep7l96782d2fbn): Infrastructure error - contact devtools@ for details. Suite build deps: [PGjj9WvCtJc5gN44Znq6Dw {'project_path': 'ydb/core/tablet_flat/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/column_table, name=unittest] (uid=rnd-ftxdo6m9d70956t2): Infrastructure error - contact devtools@ for details. Suite build deps: [87tS7jLCId2ZuojuhFNN_w {'project_path': 'ydb/core/transfer/ut/column_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/functional, name=unittest] (uid=rnd-mk5axt829idhtdlq): Infrastructure error - contact devtools@ for details. Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [piOZAKrjmzU8k9m-T_felg {'project_path': 'ydb/core/transfer/ut/functional', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/row_table, name=unittest] (uid=rnd-lxomfvlsl50v5ac6): Infrastructure error - contact devtools@ for details. Suite build deps: [8evqUqGXk9s5rUT5DJyDcA {'project_path': 'ydb/core/transfer/ut/row_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/balance_coverage/ut, name=unittest] (uid=rnd-b56opbz269gypg6o): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MaA6RTMgFHJKTmfeB8qC5Q {'project_path': 'ydb/core/tx/balance_coverage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/engines/ut, name=unittest] (uid=rnd-5vqvytqvvm5mkkhj): Infrastructure error - contact devtools@ for details. Suite build deps: [srLwByIyo1tlCEFKxY6dIw {'project_path': 'ydb/core/tx/columnshard/engines/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/splitter/ut, name=unittest] (uid=rnd-kvxugkr0e29ayv77): Infrastructure error - contact devtools@ for details. Suite build deps: [nBg-gfBR3xa4OU27vSxRHA {'project_path': 'ydb/core/tx/columnshard/splitter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_rw, name=unittest] (uid=rnd-w18utleurum7r568): Infrastructure error - contact devtools@ for details. Suite build deps: [eTZlzl1KXaRWx2RTY0jxvw {'project_path': 'ydb/core/tx/columnshard/ut_rw', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_schema, name=unittest] (uid=rnd-8w4ykkv94p5clq6s): Infrastructure error - contact devtools@ for details. Suite build deps: [Djl7HckfnzTIyQ4wgpR0Hw {'project_path': 'ydb/core/tx/columnshard/ut_schema', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/conveyor_composite/ut, name=unittest] (uid=rnd-ou4rjud23k1lgau1): Infrastructure error - contact devtools@ for details. Suite build deps: [HBjC7hgwf1ii9zG74MTEgA {'project_path': 'ydb/core/tx/conveyor_composite/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/coordinator/ut, name=unittest] (uid=rnd-0h9fwmiunt2p28uz): Infrastructure error - contact devtools@ for details. Suite build deps: [bynkxlXuTjOXWAR-mjiQkw {'project_path': 'ydb/core/tx/coordinator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/build_index/ut, name=unittest] (uid=rnd-6bsdkhsk1f90ngq4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [gXa7sUPPZEEJz6Dz3Akr3g {'project_path': 'ydb/core/tx/datashard/build_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_background_compaction, name=unittest] (uid=rnd-lt1mwbvb7wt8zm9x): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xYAaH6f7gXKiU_6_3fMlGQ {'project_path': 'ydb/core/tx/datashard/ut_background_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_change_exchange, name=unittest] (uid=rnd-jb7c9bon5gbgb9f9): Infrastructure error - contact devtools@ for details. Suite build deps: [UJ9VlZkJDlX3ckjmzYqpSw {'project_path': 'ydb/core/tx/datashard/ut_change_exchange', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_column_stats, name=unittest] (uid=rnd-m6t1b4iluhnxbwja): Infrastructure error - contact devtools@ for details. Suite build deps: [cdbPp_6trwWql4Umbv689g {'project_path': 'ydb/core/tx/datashard/ut_column_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_compaction, name=unittest] (uid=rnd-geawyyu4u7isy84f): Infrastructure error - contact devtools@ for details. Suite build deps: [U3bVwkbjL1xsFm2k-YbY4g {'project_path': 'ydb/core/tx/datashard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_erase_rows, name=unittest] (uid=rnd-qjioxdizi197x1c1): Infrastructure error - contact devtools@ for details. Suite build deps: [NG2sxu1TjH1e8J_jnm6__g {'project_path': 'ydb/core/tx/datashard/ut_erase_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_export, name=unittest] (uid=rnd-bg71vud94qcog44s): Infrastructure error - contact devtools@ for details. Suite build deps: [C7cmyMHukE138jh5D8EeLQ {'project_path': 'ydb/core/tx/datashard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_followers, name=unittest] (uid=rnd-qkrhm2tqql7fby0g): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WRgNHyNITXkysSU-wLC2eg {'project_path': 'ydb/core/tx/datashard/ut_followers', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_backup, name=unittest] (uid=rnd-ldyctpztnlze7apg): Infrastructure error - contact devtools@ for details. Suite build deps: [6IVXir115R3jO0iVFt_ihg {'project_path': 'ydb/core/tx/datashard/ut_incremental_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_restore_scan, name=unittest] (uid=rnd-wy5k8drdteczm014): Infrastructure error - contact devtools@ for details. Suite build deps: [1Pz-5UDD3ELu6R53Gcz_1A {'project_path': 'ydb/core/tx/datashard/ut_incremental_restore_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_init, name=unittest] (uid=rnd-difbverwbrdbrefe): Infrastructure error - contact devtools@ for details. Suite build deps: [e7aq4hIBH7ePjPmgZr1QrQ {'project_path': 'ydb/core/tx/datashard/ut_init', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_keys, name=unittest] (uid=rnd-hf6fdjledzv3vm5o): Infrastructure error - contact devtools@ for details. Suite build deps: [0FKR21mujbDKCRHSp5-TcA {'project_path': 'ydb/core/tx/datashard/ut_keys', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp, name=unittest] (uid=rnd-vhuu7rewfy56348p): Infrastructure error - contact devtools@ for details. Suite build deps: [M7rThwBjNpnkn_KbNFp24A {'project_path': 'ydb/core/tx/datashard/ut_kqp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_errors, name=unittest] (uid=rnd-1vwv1r1qm0cqs3qk): Infrastructure error - contact devtools@ for details. Suite build deps: [ijoS0f3EJrAPrzZ9R4G23w {'project_path': 'ydb/core/tx/datashard/ut_kqp_errors', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_locks, name=unittest] (uid=rnd-8quld36b9gxbrem6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [35QMsy-GoYXZgWWY2-wHPg {'project_path': 'ydb/core/tx/datashard/ut_locks', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minikql, name=unittest] (uid=rnd-g0mbii5nt22anzhs): Infrastructure error - contact devtools@ for details. Suite build deps: [ixlg1GmcT6k-y6YprJnyLg {'project_path': 'ydb/core/tx/datashard/ut_minikql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_object_storage_listing, name=unittest] (uid=rnd-1djlw1rsfnf9o91l): Infrastructure error - contact devtools@ for details. Suite build deps: [Jz1HRHSTn2bc8zLX-c-mHA {'project_path': 'ydb/core/tx/datashard/ut_object_storage_listing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_order, name=unittest] (uid=rnd-c0on0t24atxf5ndz): Infrastructure error - contact devtools@ for details. Suite build deps: [kuWm3vQUFRT1UMp_e0sCww {'project_path': 'ydb/core/tx/datashard/ut_order', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_range_ops, name=unittest] (uid=rnd-znhdo5t56py5tf7t): Infrastructure error - contact devtools@ for details. Suite build deps: [DNPSOuckYrnNARJjQWt8QA {'project_path': 'ydb/core/tx/datashard/ut_range_ops', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_iterator, name=unittest] (uid=rnd-8mna3u0lgz7m57ez): Infrastructure error - contact devtools@ for details. Suite build deps: [SeTD-t_wzG2b3v46Ax9ukg {'project_path': 'ydb/core/tx/datashard/ut_read_iterator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_table, name=unittest] (uid=rnd-yvhuu97ukkhfifwr): Infrastructure error - contact devtools@ for details. Suite build deps: [2oBFx6XKak6KmBgJQBT2YA {'project_path': 'ydb/core/tx/datashard/ut_read_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_reassign, name=unittest] (uid=rnd-t05e190cz6tlovf8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [IKKpxuXYp3M3ybrJ65N4mg {'project_path': 'ydb/core/tx/datashard/ut_reassign', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_replication, name=unittest] (uid=rnd-gvbhbedad50iem82): Infrastructure error - contact devtools@ for details. Suite build deps: [6eEcXLQYoIJ0z-GIFrwKzw {'project_path': 'ydb/core/tx/datashard/ut_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_rs, name=unittest] (uid=rnd-kpvfwjp5v2igq4kk): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wz0eRBozGHwQhbKs65Rc7g {'project_path': 'ydb/core/tx/datashard/ut_rs', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_sequence, name=unittest] (uid=rnd-ecpvlvhh1mrdorzy): Infrastructure error - contact devtools@ for details. Suite build deps: [AOmxH2IAUDKck2Rm4IOVsg {'project_path': 'ydb/core/tx/datashard/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_snapshot, name=unittest] (uid=rnd-10jrsykls90sokj8): Infrastructure error - contact devtools@ for details. Suite build deps: [dABJ59XXJTGlojDJbS2wvQ {'project_path': 'ydb/core/tx/datashard/ut_snapshot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_stats, name=unittest] (uid=rnd-trplr5cg5a911c5y): Infrastructure error - contact devtools@ for details. Suite build deps: [ffUI2on3gDfBzd7nNgrAgQ {'project_path': 'ydb/core/tx/datashard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_trace, name=unittest] (uid=rnd-co0uz9ay1wi2ka1r): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u3YODMLb4I80UwZ0I9IsaA {'project_path': 'ydb/core/tx/datashard/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_upload_rows, name=unittest] (uid=rnd-5n5fen7in3vqvt4t): Infrastructure error - contact devtools@ for details. 
Suite build deps: [I49jyhlGCA6fO8egDUSLrQ {'project_path': 'ydb/core/tx/datashard/ut_upload_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_vacuum, name=unittest] (uid=rnd-pafm3fk3t78a1hy9): Infrastructure error - contact devtools@ for details. Suite build deps: [XcmsW4-3pH2iCXRjitGe1Q {'project_path': 'ydb/core/tx/datashard/ut_vacuum', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_volatile, name=unittest] (uid=rnd-yxyewxrui9vvojxq): Infrastructure error - contact devtools@ for details. Suite build deps: [a_DSkiF-8cBowbz8bJBefg {'project_path': 'ydb/core/tx/datashard/ut_volatile', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/limiter/grouped_memory/ut, name=unittest] (uid=rnd-q7j9rattwhqfxp63): Infrastructure error - contact devtools@ for details. Suite build deps: [A34YqSyVbB_48nEzqZSFuw {'project_path': 'ydb/core/tx/limiter/grouped_memory/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/long_tx_service/ut, name=unittest] (uid=rnd-32dz8fkxalel9hq2): Infrastructure error - contact devtools@ for details. Suite build deps: [hxicY-1TV7GGW74O090Cxw {'project_path': 'ydb/core/tx/long_tx_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/mediator/ut, name=unittest] (uid=rnd-2t8xu1jf7xsy8z6g): Infrastructure error - contact devtools@ for details. Suite build deps: [WayO-Q8MFfKpkhbXZkhaLw {'project_path': 'ydb/core/tx/mediator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_dst_creator, name=unittest] (uid=rnd-yt9apy35hanh5axn): Infrastructure error - contact devtools@ for details. Suite build deps: [BFzDmqMxJdwORz0Kf8u2pQ {'project_path': 'ydb/core/tx/replication/controller/ut_dst_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_stream_creator, name=unittest] (uid=rnd-58t5fo51aq98mp5q): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-eOsEaruiA8CVY_ZLV3n6w {'project_path': 'ydb/core/tx/replication/controller/ut_stream_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_target_discoverer, name=unittest] (uid=rnd-nfs0xa45p4qd3ms1): Infrastructure error - contact devtools@ for details. Suite build deps: [9opFVjkunilByFTPjN91bw {'project_path': 'ydb/core/tx/replication/controller/ut_target_discoverer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_json_change_record, name=unittest] (uid=rnd-sjpx69xtv4qpu2py): Infrastructure error - contact devtools@ for details. Suite build deps: [L5vwiseFwzyWkh9s2aVFvw {'project_path': 'ydb/core/tx/replication/service/ut_json_change_record', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_table_writer, name=unittest] (uid=rnd-4ujr9g9qfujc8pon): Infrastructure error - contact devtools@ for details. Suite build deps: [Xd0ni5756Hi0ZKdRbHoLnQ {'project_path': 'ydb/core/tx/replication/service/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_worker, name=unittest] (uid=rnd-3pgpafmhj1bp3hnm): Infrastructure error - contact devtools@ for details. Suite build deps: [-UmUgxxpqn9DoG33gIP3BA {'project_path': 'ydb/core/tx/replication/service/ut_worker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/ydb_proxy/ut, name=unittest] (uid=rnd-rv79tqnhdquk4yn8): Infrastructure error - contact devtools@ for details. Suite build deps: [i68m-_FhhioonN6zFgaA1A {'project_path': 'ydb/core/tx/replication/ydb_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_double_indexed, name=unittest] (uid=rnd-lm3survrm9m5axqf): Infrastructure error - contact devtools@ for details. Suite build deps: [pdMl_-TxfWMX4ugQNRfKxg {'project_path': 'ydb/core/tx/scheme_board/ut_double_indexed', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_monitoring, name=unittest] (uid=rnd-hir1h65qxqjzw278): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [trzsnv92w8M376ItLmvYCA {'project_path': 'ydb/core/tx/scheme_board/ut_monitoring', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/scheme_board/ut_populator, name=unittest] (uid=rnd-a4rwteb23978s0kj): Infrastructure error - contact devtools@ for details. Suite build deps: [YtrxxbNF50V9gBvd7WsawQ {'project_path': 'ydb/core/tx/scheme_board/ut_populator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_replica, name=unittest] (uid=rnd-u20n5sjengbz0rm6): Infrastructure error - contact devtools@ for details. Suite build deps: [-BSJvsauTn95u4ypilmwvQ {'project_path': 'ydb/core/tx/scheme_board/ut_replica', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_subscriber, name=unittest] (uid=rnd-v5ov87eq20xuadek): Infrastructure error - contact devtools@ for details. Suite build deps: [KbSm-_Y3D3l3Sv-yEprw-w {'project_path': 'ydb/core/tx/scheme_board/ut_subscriber', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_auditsettings, name=unittest] (uid=rnd-5m39uduh2qslj0hg): Infrastructure error - contact devtools@ for details. Suite build deps: [DOhgrk1wzlVn1Q_mQBU2wg {'project_path': 'ydb/core/tx/schemeshard/ut_auditsettings', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_background_cleaning, name=unittest] (uid=rnd-5atq5xujqoz961y7): Infrastructure error - contact devtools@ for details. Suite build deps: [8Hxst4Fqk_58Ft1OjViVBw {'project_path': 'ydb/core/tx/schemeshard/ut_background_cleaning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection, name=unittest] (uid=rnd-u4z4o5kfltqru5hh): Infrastructure error - contact devtools@ for details. Suite build deps: [Db_DfC4BZN9p-Z58hJ0CtQ {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection_reboots, name=unittest] (uid=rnd-vtvt37qbfnr8nrb9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [LqZF5w8QeNSZOAuc3GrBDA {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base, name=unittest] (uid=rnd-b2y9nfa0tdnrm2jt): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zV5Xv4_weaC_GQW_wRVNmg {'project_path': 'ydb/core/tx/schemeshard/ut_base', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base_reboots, name=unittest] (uid=rnd-elgxqmqrt4w8gk5y): Infrastructure error - contact devtools@ for details. Suite build deps: [dOjoM87TRpekcPUFYLpEAw {'project_path': 'ydb/core/tx/schemeshard/ut_base_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume, name=unittest] (uid=rnd-opki1nwktepy7nts): Infrastructure error - contact devtools@ for details. Suite build deps: [iBJXPT1naAQICmKdocA7JQ {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume_reboots, name=unittest] (uid=rnd-7iijv5skavf25as1): Infrastructure error - contact devtools@ for details. Suite build deps: [jK0so_AbGL-alcaRo-M18g {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream, name=unittest] (uid=rnd-ptd0sfovlh5rehuh): Infrastructure error - contact devtools@ for details. Suite build deps: [LIwCT7HQvOZClHx4vILs4g {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream_reboots, name=unittest] (uid=rnd-2q6dycfyw0xw921w): Infrastructure error - contact devtools@ for details. Suite build deps: [R7gP870dXf8SW6nSoqWCqg {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_column_build, name=unittest] (uid=rnd-boiuacumurrrtlbt): Infrastructure error - contact devtools@ for details. 
Suite build deps: [eqOAMZfwPk9OeKRF0RJs2Q {'project_path': 'ydb/core/tx/schemeshard/ut_column_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_compaction, name=unittest] (uid=rnd-xq97f1qy6vrttye4): Infrastructure error - contact devtools@ for details. Suite build deps: [d-1DWW1bbRdM3xSiLTBSiQ {'project_path': 'ydb/core/tx/schemeshard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup, name=unittest] (uid=rnd-4minl9moyq22hnnu): Infrastructure error - contact devtools@ for details. Suite build deps: [qMazPEkLfIqusWgn2NKjnA {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup_reboots, name=unittest] (uid=rnd-e47huipiqrmiq474): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yMC6RhA5wWHgW8mr1U9VQQ {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export, name=unittest] (uid=rnd-96xdbacu0dzuzdee): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [FKc4h0hkBqDTS-dkC6-DCw {'project_path': 'ydb/core/tx/schemeshard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export_reboots_s3, name=unittest] (uid=rnd-3r8901kofzdemuwk): Infrastructure error - contact devtools@ for details. Suite build deps: [3M9bquhx7xMkCwbzqStBtA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rq18eqxhepFDDhodkMHy2w {'project_path': 'ydb/core/tx/schemeshard/ut_export_reboots_s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source, name=unittest] (uid=rnd-gytmt4v2ztk5p4qz): Infrastructure error - contact devtools@ for details. 
Suite build deps: [feuXhtlJIgmtr6fvHGca1A {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source_reboots, name=unittest] (uid=rnd-z1u3illuanoagz7u): Infrastructure error - contact devtools@ for details. Suite build deps: [IdZz8pNYL0Ibg6lJOUaUxg {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_table, name=unittest] (uid=rnd-pq677edmy50b9nbj): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wPM06lKQV_H-kTteeSAQaw {'project_path': 'ydb/core/tx/schemeshard/ut_external_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_table_reboots, name=unittest] (uid=rnd-62steof4l96to5bl): Infrastructure error - contact devtools@ for details. Suite build deps: [RhXXnCTTQAH8EzuW5cvTog {'project_path': 'ydb/core/tx/schemeshard/ut_external_table_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain, name=unittest] (uid=rnd-lskzox3vvovjwwvr): Infrastructure error - contact devtools@ for details. Suite build deps: [tBIPlKZLwdm7gNDTXyVXew {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain_reboots, name=unittest] (uid=rnd-lotuhbxtlyspmwm2): Infrastructure error - contact devtools@ for details. Suite build deps: [or0UXPwY2K9lM7OKERBifw {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_failure_injection, name=unittest] (uid=rnd-c1p34yfmshk30ub7): Infrastructure error - contact devtools@ for details. Suite build deps: [Uh6kg88si_7pnyphHK-Z1A {'project_path': 'ydb/core/tx/schemeshard/ut_failure_injection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_filestore_reboots, name=unittest] (uid=rnd-qj1ablb1bllf82z4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Q0bmEBdJwVpH_TbF0N41iA {'project_path': 'ydb/core/tx/schemeshard/ut_filestore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_incremental_restore, name=unittest] (uid=rnd-fg9vm36iz2l328i6): Infrastructure error - contact devtools@ for details. Suite build deps: [Hl__D-50cRK01vo0Ph1WDw {'project_path': 'ydb/core/tx/schemeshard/ut_incremental_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_incremental_restore_reboots, name=unittest] (uid=rnd-1u84gn3mgnlzl214): Infrastructure error - contact devtools@ for details. Suite build deps: [m8z84lyOz1gMoeoXTCtLNw {'project_path': 'ydb/core/tx/schemeshard/ut_incremental_restore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index, name=unittest] (uid=rnd-dyjzjbjxwks49ved): Infrastructure error - contact devtools@ for details. Suite build deps: [dKt8UzMU89kns0FBHZj3jQ {'project_path': 'ydb/core/tx/schemeshard/ut_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build_reboots, name=unittest] (uid=rnd-m3800fgihc5o8kta): Infrastructure error - contact devtools@ for details. Suite build deps: [awlMXfQySbdEdHs1---8LA {'project_path': 'ydb/core/tx/schemeshard/ut_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_login, name=unittest] (uid=rnd-o2ksgj6naz14y10c): Infrastructure error - contact devtools@ for details. Suite build deps: [awPwmsfHS3f9A0QfqgJZSw {'project_path': 'ydb/core/tx/schemeshard/ut_login', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_move, name=unittest] (uid=rnd-mmel688whzlm5m5n): Infrastructure error - contact devtools@ for details. Suite build deps: [oTbVZtEB-OtUWeiHxqNTow {'project_path': 'ydb/core/tx/schemeshard/ut_move', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap, name=unittest] (uid=rnd-2c7b8kv2w60p5ak7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4V_bmLIQncYNqnisHRQPVw {'project_path': 'ydb/core/tx/schemeshard/ut_olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap_reboots, name=unittest] (uid=rnd-xisb46zcusulbits): Infrastructure error - contact devtools@ for details. Suite build deps: [_jn8a7TgpLmTr92RiAJXng {'project_path': 'ydb/core/tx/schemeshard/ut_olap_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_reboots, name=unittest] (uid=rnd-adz8sykdqm5lq2hb): Infrastructure error - contact devtools@ for details. Suite build deps: [BjDNMg5YNxpdnIhWYyzxfg {'project_path': 'ydb/core/tx/schemeshard/ut_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_replication_reboots, name=unittest] (uid=rnd-i1mym1d15qizu8d4): Infrastructure error - contact devtools@ for details. Suite build deps: [MezbuoIhEz9abtj7eg4CpQ {'project_path': 'ydb/core/tx/schemeshard/ut_replication_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_resource_pool, name=unittest] (uid=rnd-x6lpojy7r0lfv829): Infrastructure error - contact devtools@ for details. Suite build deps: [e0MvAO-nl_WkOQ9CNmYSvA {'project_path': 'ydb/core/tx/schemeshard/ut_resource_pool', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_resource_pool_reboots, name=unittest] (uid=rnd-6x3eo7r0ul424zt5): Infrastructure error - contact devtools@ for details. Suite build deps: [GQycUOVsSwTibD037gntnA {'project_path': 'ydb/core/tx/schemeshard/ut_resource_pool_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_restore, name=unittest] (uid=rnd-a9727l23j3irevca): Infrastructure error - contact devtools@ for details. Suite build deps: [8ZdgwjwNXkCIl5ScKFsCug {'project_path': 'ydb/core/tx/schemeshard/ut_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr, name=unittest] (uid=rnd-lnsh1ij36dmmt3rh): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Rbq819mKbpLkrVwQQ2sx1A {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr_reboots, name=unittest] (uid=rnd-456x3ti0xhi2p758): Infrastructure error - contact devtools@ for details. Suite build deps: [IyzkEDbmV0M5lgwSM-Z9nA {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ru_calculator, name=unittest] (uid=rnd-kfgetky7jtdv59vf): Infrastructure error - contact devtools@ for details. Suite build deps: [DXV2sXyIAdUoaMl1i5lRUg {'project_path': 'ydb/core/tx/schemeshard/ut_ru_calculator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_secret, name=unittest] (uid=rnd-000qi9nkmn0i97by): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xsVY6bhazrf5FTYpqO5m5g {'project_path': 'ydb/core/tx/schemeshard/ut_secret', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_secret_reboots, name=unittest] (uid=rnd-mr9vp2bvrnk00ko0): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xS0SGi9BQ4aLqwFal0BNVg {'project_path': 'ydb/core/tx/schemeshard/ut_secret_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless, name=unittest] (uid=rnd-89smyzjox1t09pa0): Infrastructure error - contact devtools@ for details. Suite build deps: [UddFLaadY9UdrrXJHI80ZQ {'project_path': 'ydb/core/tx/schemeshard/ut_serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_split_merge, name=unittest] (uid=rnd-owwm6u3l3psqogg7): Infrastructure error - contact devtools@ for details. Suite build deps: [PiZMFH_CArv-IXXYcG2PfQ {'project_path': 'ydb/core/tx/schemeshard/ut_split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_stats, name=unittest] (uid=rnd-t8c7bbcpidlpe02u): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-OeDz_BUdXnRyZ4DLkfd3Q {'project_path': 'ydb/core/tx/schemeshard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_streaming_query, name=unittest] (uid=rnd-nr9kyotkpwmqhdwh): Infrastructure error - contact devtools@ for details. Suite build deps: [MHAw6NLyeOH0ZLZosBNAgQ {'project_path': 'ydb/core/tx/schemeshard/ut_streaming_query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_streaming_query_reboots, name=unittest] (uid=rnd-nzjqtzwt9yp7oipa): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yUpamWog8DTdJwAwrCv-gA {'project_path': 'ydb/core/tx/schemeshard/ut_streaming_query_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain, name=unittest] (uid=rnd-3k5grdkb4qs28lhb): Infrastructure error - contact devtools@ for details. Suite build deps: [qrLP0RMcRR3o08aB4_sPfA {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain_reboots, name=unittest] (uid=rnd-o7djf8g0acgdz4oz): Infrastructure error - contact devtools@ for details. Suite build deps: [r0TKetvm1hFusRAoVYTCVw {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_system_names, name=unittest] (uid=rnd-xauxfbhwf6nunna8): Infrastructure error - contact devtools@ for details. Suite build deps: [d3of7F2eSfbMZpPkaskASg {'project_path': 'ydb/core/tx/schemeshard/ut_system_names', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_sysview_reboots, name=unittest] (uid=rnd-azokns09oc7lo94m): Infrastructure error - contact devtools@ for details. Suite build deps: [WXat6498WJ4KEp6SlhiLdw {'project_path': 'ydb/core/tx/schemeshard/ut_sysview_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_topic_set_boundaries, name=unittest] (uid=rnd-e2p9swi86gmtbz3v): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ESkls2LuEh_2Fl2dQ1BBDQ {'project_path': 'ydb/core/tx/schemeshard/ut_topic_set_boundaries', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_topic_splitmerge, name=unittest] (uid=rnd-ziw9zz3gj243xipa): Infrastructure error - contact devtools@ for details. Suite build deps: [XRZxG_rxrCUQ-sNV4OPr7A {'project_path': 'ydb/core/tx/schemeshard/ut_topic_splitmerge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ttl, name=unittest] (uid=rnd-mrfdseceja80hcbc): Infrastructure error - contact devtools@ for details. Suite build deps: [XDQbgiAdmCk47KXFk9Qzbw {'project_path': 'ydb/core/tx/schemeshard/ut_ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes, name=unittest] (uid=rnd-di91w55ukqnhzdv0): Infrastructure error - contact devtools@ for details. Suite build deps: [qNQdgqSP8Ps8QFskT-AJAA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes_reboots, name=unittest] (uid=rnd-yhht36oidatk7u3u): Infrastructure error - contact devtools@ for details. Suite build deps: [WUYmfVgN8t_LZGxMmPi0YA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_vector_index_build_reboots, name=unittest] (uid=rnd-v72ptfflrvb5zuhd): Infrastructure error - contact devtools@ for details. Suite build deps: [9Rnm685erbAp1QnQhcKeaA {'project_path': 'ydb/core/tx/schemeshard/ut_vector_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceproxy/ut, name=unittest] (uid=rnd-z77bvl3r736wz8vb): Infrastructure error - contact devtools@ for details. Suite build deps: [YVhMCiXz6q1y8KTnKZljKA {'project_path': 'ydb/core/tx/sequenceproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceshard/ut, name=unittest] (uid=rnd-rgvqtlpyg4itb9pa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [0vsdjE1bnxlIIezSe6s7aQ {'project_path': 'ydb/core/tx/sequenceshard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sharding/ut, name=unittest] (uid=rnd-m5qbss0w9d9z8p06): Infrastructure error - contact devtools@ for details. Suite build deps: [_p3HT93GNxzte96VNlKciw {'project_path': 'ydb/core/tx/sharding/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tiering/ut, name=unittest] (uid=rnd-1x5ymswf1e7hxndk): Infrastructure error - contact devtools@ for details. Suite build deps: [lk2t8BGNWMVSSq9D1f_RcQ {'project_path': 'ydb/core/tx/tiering/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/time_cast/ut, name=unittest] (uid=rnd-2qzi2xviqc5w86hc): Infrastructure error - contact devtools@ for details. Suite build deps: [8Dg4aIIyNkowHS4DY2pIRw {'project_path': 'ydb/core/tx/time_cast/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator/ut, name=unittest] (uid=rnd-5s42b8ngv1f877rv): Infrastructure error - contact devtools@ for details. Suite build deps: [Dbk5GoeOJxYTT-4BiLLN9g {'project_path': 'ydb/core/tx/tx_allocator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_base_tenant, name=unittest] (uid=rnd-ydobryfn2eleuzth): Infrastructure error - contact devtools@ for details. Suite build deps: [-OphCGfhfabU0qEPUkB4Zw {'project_path': 'ydb/core/tx/tx_proxy/ut_base_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_encrypted_storage, name=unittest] (uid=rnd-kwvxzhtwnjfrdupz): Infrastructure error - contact devtools@ for details. Suite build deps: [LMe5ECRLZxpZI1_--4MDrA {'project_path': 'ydb/core/tx/tx_proxy/ut_encrypted_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_ext_tenant, name=unittest] (uid=rnd-zpz2jpmiy80m93ky): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BsBthz5OF50xjOEe_5V_0A {'project_path': 'ydb/core/tx/tx_proxy/ut_ext_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_storage_tenant, name=unittest] (uid=rnd-3ajg3hmhumd38tha): Infrastructure error - contact devtools@ for details. Suite build deps: [_Pb3e2dXxOYTmZrx5kc1JQ {'project_path': 'ydb/core/tx/tx_proxy/ut_storage_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/ut, name=unittest] (uid=rnd-m2gi8shn3pyhx54t): Infrastructure error - contact devtools@ for details. Suite build deps: [9G9jB5APk36vdZwwRdEjSQ {'project_path': 'ydb/core/viewer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/wrappers/ut, name=unittest] (uid=rnd-9sglj3x5tv6bgajg): Infrastructure error - contact devtools@ for details. Suite build deps: [Im4axzWBFIQGvfl5drWdzQ {'project_path': 'ydb/core/wrappers/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ydb_convert/ut, name=unittest] (uid=rnd-bzz6390dd7szdgtb): Infrastructure error - contact devtools@ for details. Suite build deps: [cXXgdiC5qGpXRCkEeP4LTQ {'project_path': 'ydb/core/ydb_convert/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/cloud_events/cloud_events_ut, name=unittest] (uid=rnd-q7j7z6kexacvvu2l): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yLRNf0brYe2h8hNKMOA58Q {'project_path': 'ydb/core/ymq/actor/cloud_events/cloud_events_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/ymq/actor/yc_search_ut, name=unittest] (uid=rnd-4xu69oyezmab1xjc): Infrastructure error - contact devtools@ for details. Suite build deps: [GusBf2M0Ns7V9r3pFaH_WA {'project_path': 'ydb/core/ymq/actor/yc_search_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/base/ut, name=unittest] (uid=rnd-spy1mv7gpgw2u9cf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [9DWg3VLsC_5C6HFnLeHxCQ {'project_path': 'ydb/core/ymq/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/http/ut, name=unittest] (uid=rnd-cim1q057r4azfcmz): Infrastructure error - contact devtools@ for details. Suite build deps: [5yaK2b0ecP0gn3tHA49mDg {'project_path': 'ydb/core/ymq/http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/ut, name=unittest] (uid=rnd-mr5oewpjx3ohiz4g): Infrastructure error - contact devtools@ for details. Suite build deps: [WccYW12-XcwTZAwLQIl3iQ {'project_path': 'ydb/core/ymq/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/ncloud/impl/ut, name=unittest] (uid=rnd-43neey5a5twyf8zm): Infrastructure error - contact devtools@ for details. Suite build deps: [AaX8AOvWbZzCFyMU7VJgfw {'project_path': 'ydb/library/ncloud/impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/query_actor/ut, name=unittest] (uid=rnd-14zscc79fhegdcwy): Infrastructure error - contact devtools@ for details. Suite build deps: [kHlVdzYa3hK-MebVO-M0GA {'project_path': 'ydb/library/query_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/table_creator/ut, name=unittest] (uid=rnd-vb58gqa4n9clohjx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xpYWceOA6ApJasg7zsY6Mg {'project_path': 'ydb/library/table_creator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/providers/generic/actors/ut, name=unittest] (uid=rnd-axfmhaic6ym652po): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xGzrVYHlN_wbNkdYo5mVxw {'project_path': 'ydb/library/yql/providers/generic/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/library/yql/providers/pq/provider/ut, name=unittest] (uid=rnd-n1myk8pyqfclllqe): Infrastructure error - contact devtools@ for details. 
Suite build deps: [6pua9k7bHTm3uinIRY3sQA {'project_path': 'ydb/library/yql/providers/pq/provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/providers/solomon/actors/ut, name=unittest] (uid=rnd-468tb62ea3wjsx2j): Infrastructure error - contact devtools@ for details. Suite build deps: [AqHo-8SJeJJvYzvMzUf4fw {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [iqZovhX8-wZxMP4ZK0p5pQ {'project_path': 'ydb/library/yql/providers/solomon/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tnzldir5PgQBy06P5L_gdg {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/public/sdk/cpp/src/client/federated_topic/ut, name=unittest] (uid=rnd-20403tkbpefwmq8e): Infrastructure error - contact devtools@ for details. Suite build deps: [K30y2FGFi2wtayu9J-XxLA {'project_path': 'ydb/public/sdk/cpp/src/client/federated_topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut, name=unittest] (uid=rnd-b3keiixrrajsaet8): Infrastructure error - contact devtools@ for details. Suite build deps: [LewVznt6-7vbcp5hdtmIgw {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut, name=unittest] (uid=rnd-61lf2kamn7hwjyph): Infrastructure error - contact devtools@ for details. Suite build deps: [9pD4AHMZhTJVR0eSOFHN8Q {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut, name=unittest] (uid=rnd-59t2893az7h5khj2): Infrastructure error - contact devtools@ for details. Suite build deps: [2qxL0rdeHctdbRIj4up9Bw {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut, name=unittest] (uid=rnd-byt1f8ppgm1qd0np): Infrastructure error - contact devtools@ for details. 
Suite build deps: [idOZYXM632ouyQhfP7e_eQ {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/cms/ut, name=unittest] (uid=rnd-80oct4xp85rdxbza): Infrastructure error - contact devtools@ for details.
Suite build deps: [eOe_sx9dNL7mGOHq2ij_ZA {'project_path': 'ydb/services/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/config/ut, name=unittest] (uid=rnd-u2k9yb438rq4nbs5): Infrastructure error - contact devtools@ for details.
Suite build deps: [KAmsgVjmQpesezfiZUibyw {'project_path': 'ydb/services/config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/datastreams/ut, name=unittest] (uid=rnd-dg65xc7kud0sev9q): Infrastructure error - contact devtools@ for details.
Suite build deps: [Vl1zi_-X63C5LZ0E8w3Yiw {'project_path': 'ydb/services/datastreams/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/dynamic_config/ut, name=unittest] (uid=rnd-q9sy9opwm5omq85i): Infrastructure error - contact devtools@ for details.
Suite build deps: [Z9UqiYDmoYcqLBlEAMhojw {'project_path': 'ydb/services/dynamic_config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ext_index/ut, name=unittest] (uid=rnd-6jxhb7s84zmz22xm): Infrastructure error - contact devtools@ for details.
Suite build deps: [DBNs0CbxNFHp-aJdsu6giQ {'project_path': 'ydb/services/ext_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/fq/ut_integration, name=unittest] (uid=rnd-s1dsgnp8i13urh7c): Infrastructure error - contact devtools@ for details.
Suite build deps: [inKKMCq6JPg0_mxQrw-6Dg {'project_path': 'ydb/services/fq/ut_integration', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/keyvalue/ut, name=unittest] (uid=rnd-ljkmtmojxaeosneg): Infrastructure error - contact devtools@ for details.
Suite build deps: [PrsYyrANCBu4kEm736Bz8w {'project_path': 'ydb/services/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/metadata/initializer/ut, name=unittest] (uid=rnd-h2mma5zzie5842l1): Infrastructure error - contact devtools@ for details.
Suite build deps: [hoIa9HSTKJtHFpSkf0Wtuw {'project_path': 'ydb/services/metadata/initializer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/metadata/secret/ut, name=unittest] (uid=rnd-vc6l7l0lssokq8u9): Infrastructure error - contact devtools@ for details.
Suite build deps: [1lYo0nQlsoTIowLCznnQtQ {'project_path': 'ydb/services/metadata/secret/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/persqueue_cluster_discovery/ut, name=unittest] (uid=rnd-flbwfhwfqnc64v1a): Infrastructure error - contact devtools@ for details.
Suite build deps: [hldHtvsZNY3uKgAnZ6Tl-Q {'project_path': 'ydb/services/persqueue_cluster_discovery/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/persqueue_v1/ut, name=unittest] (uid=rnd-ij2ppc08f1z8pjln): Infrastructure error - contact devtools@ for details.
Suite build deps: [ir58ly821plRjVtkpG5cPQ {'project_path': 'ydb/services/persqueue_v1/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/persqueue_v1/ut/describes_ut, name=unittest] (uid=rnd-c0dhbiyntc6j2syi): Infrastructure error - contact devtools@ for details.
Suite build deps: [8Zoz_1JY7vzyYTkMJtQ1yg {'project_path': 'ydb/services/persqueue_v1/ut/describes_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/persqueue_v1/ut/new_schemecache_ut, name=unittest] (uid=rnd-f1xga9lsjajqfv6s): Infrastructure error - contact devtools@ for details.
Suite build deps: [3Yoer_B5f11XVTRet7lrSQ {'project_path': 'ydb/services/persqueue_v1/ut/new_schemecache_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/rate_limiter/ut, name=unittest] (uid=rnd-sdmd4qd0o4m8ltad): Infrastructure error - contact devtools@ for details.
Suite build deps: [lJrPizn6_pekTZ8I_78Nug {'project_path': 'ydb/services/rate_limiter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ydb/backup_ut, name=unittest] (uid=rnd-vgx8cmt7lbrw60nn): Infrastructure error - contact devtools@ for details.
Suite build deps: [5gMSu3U7dJM_rjPKGlTZNA {'project_path': 'ydb/services/ydb/backup_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ydb/ut, name=unittest] (uid=rnd-1otwb5kdygt0w0zd): Infrastructure error - contact devtools@ for details.
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [y16TwfRXjlM9KDhC5oshxg {'project_path': 'ydb/services/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/tests/fq/control_plane_storage, name=unittest] (uid=rnd-2h7g48hdnajegxe6): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Yk-dm7FJEqws5N1DrNNaEA {'project_path': 'ydb/tests/fq/control_plane_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/fq/pq_async_io/ut, name=unittest] (uid=rnd-r1re6ga1dc9irb2a): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [tP5ByMKymBSAcmPs4xRz-w {'project_path': 'ydb/tests/fq/pq_async_io/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/backup, name=unittest] (uid=rnd-lua0dfvqurhll507): Infrastructure error - contact devtools@ for details.
Suite build deps: [4aAhZXFJekFavucKAv-9LQ {'project_path': 'ydb/tests/functional/backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/backup/s3_path_style, name=unittest] (uid=rnd-xsgcmmjgbwey8ua2): Infrastructure error - contact devtools@ for details.
Suite build deps: [GUWutYCsU98fXdqV46JJZw {'project_path': 'ydb/tests/functional/backup/s3_path_style', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LJ2t_qImD7Uey7cqPiAnNQ {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PBslpMFkWX6JG4o4gr4wJA {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_indexes, name=unittest] (uid=rnd-l6293u1k111ne7ot): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [X5OgJxCiw9JtN6ASEv2UpA {'project_path': 'ydb/tests/functional/kqp/kqp_indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_query_session, name=unittest] (uid=rnd-gpggx4r1awgz13p5): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rJYMlPHqR_Rd-UpIAeRCFg {'project_path': 'ydb/tests/functional/kqp/kqp_query_session', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_query_svc, name=unittest] (uid=rnd-sxjj7v6zjz4cv3lf): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i3D7PdHiySeSkneOZQHb9Q {'project_path': 'ydb/tests/functional/kqp/kqp_query_svc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/replication, name=unittest] (uid=rnd-17zpopy4c1secji3): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nUIAl444UQudDJXfaXIPiQ {'project_path': 'ydb/tests/functional/replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/sdk/cpp/sdk_credprovider, name=unittest] (uid=rnd-uqpc4fkthns67swv): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nasmOdUCciMvCqo47xNijQ {'project_path': 'ydb/tests/functional/sdk/cpp/sdk_credprovider', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/olap/high_load, name=unittest] (uid=rnd-tx4dh8d8xkkx51gy): Infrastructure error - contact devtools@ for details.
Suite build deps: [N3wfuEiUgil-dbRCORbWUA {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RkhG7WuJXDr1zbK17a6x6g {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pTicvaH3rEiPVN_3whOTKQ {'project_path': 'ydb/tests/olap/high_load', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Failed
+ echo 1